[ 559.808525] env[63379]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=63379) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 559.808854] env[63379]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=63379) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 559.808977] env[63379]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=63379) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 559.809272] env[63379]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 559.910526] env[63379]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=63379) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:383}}
[ 559.922177] env[63379]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.012s {{(pid=63379) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:421}}
[ 560.524037] env[63379]: INFO nova.virt.driver [None req-801adcae-b516-4990-b8a9-58a7b2afddac None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 560.594382] env[63379]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 560.594575] env[63379]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 560.594648] env[63379]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=63379) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 563.888609] env[63379]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-4d86cee4-0d9b-4671-8521-a7e6782b5896 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 563.903621] env[63379]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=63379) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 563.903815] env[63379]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-1ae55047-8dea-4e16-b6b8-ba703f9f296e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 563.926692] env[63379]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 99584.
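Editorial note: the startup entries above are driven by two oslo.concurrency helpers: processutils.execute() runs the iscsiadm capability probe, and a named lockutils lock serializes VMwareAPISession._create_session. A minimal sketch of those calling patterns (illustrative only, not Nova's actual code; the command, lock name and the create_session() stub are lifted from the log purely as examples):

    from oslo_concurrency import lockutils, processutils

    # "Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm":
    # processutils.execute() spawns the command and returns (stdout, stderr);
    # check_exit_code=[0, 1] also tolerates grep's "no match" exit status.
    out, err = processutils.execute(
        'grep', '-F', 'node.session.scan', '/sbin/iscsiadm',
        check_exit_code=[0, 1])

    # "Acquiring lock ... acquired ... held": lockutils.synchronized() wraps a
    # function in a named lock; its "inner" wrapper is what emits the
    # lockutils DEBUG entries seen above.
    @lockutils.synchronized('oslo_vmware_api_lock')
    def create_session():
        pass  # e.g. log in to vCenter one caller at a time (placeholder body)

    create_session()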
[ 563.926851] env[63379]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.332s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 563.927347] env[63379]: INFO nova.virt.vmwareapi.driver [None req-801adcae-b516-4990-b8a9-58a7b2afddac None None] VMware vCenter version: 7.0.3
[ 563.930754] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66fb655f-db9b-47fe-b801-49f80e0a8fa7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 563.947432] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6f3f82e-fdc1-4c50-9362-ee438bee27a5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 563.952952] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df50d9eb-a6a2-4c87-bbbb-ac5105290ca9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 563.959330] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-951f1a3c-67bd-4882-a831-5f0f189ec7cf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 563.971998] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed504fe8-198c-4339-b413-5a22712858bd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 563.977760] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6af9b074-14c7-4b3c-89ae-53bf7a2cc0cb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 564.007437] env[63379]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-78493dfe-166b-4f8b-b29f-7226ceb60587 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 564.012493] env[63379]: DEBUG nova.virt.vmwareapi.driver [None req-801adcae-b516-4990-b8a9-58a7b2afddac None None] Extension org.openstack.compute already exists. {{(pid=63379) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:226}}
[ 564.015184] env[63379]: INFO nova.compute.provider_config [None req-801adcae-b516-4990-b8a9-58a7b2afddac None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
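Editorial note: the vCenter traffic above goes through oslo.vmware: VMwareAPISession performs the SessionManager.Login seen in the log, and invoke_api() dispatches helpers such as vim_util.get_objects(), which issues the repeated PropertyCollector.RetrievePropertiesEx calls. A small usage sketch, with placeholder host and credentials and an argument order that is an approximation of the oslo.vmware constructor, not a verified signature:

    from oslo_vmware import api, vim_util

    # Positional args (approximate): vCenter host, user, password,
    # API retry count, task poll interval in seconds.  All placeholders.
    session = api.VMwareAPISession('vc1.example.test', 'administrator',
                                   'secret', 10, 0.5)

    # get_objects() pages through managed objects via
    # PropertyCollector.RetrievePropertiesEx, matching the entries above.
    vms = session.invoke_api(vim_util, 'get_objects', session.vim,
                             'VirtualMachine', 100)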
[ 564.518990] env[63379]: DEBUG nova.context [None req-801adcae-b516-4990-b8a9-58a7b2afddac None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),2e16cbf6-230e-487b-8e0a-d00744f85bf9(cell1) {{(pid=63379) load_cells /opt/stack/nova/nova/context.py:464}}
[ 564.521100] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 564.521335] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 564.522027] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 564.522493] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] Acquiring lock "2e16cbf6-230e-487b-8e0a-d00744f85bf9" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 564.522690] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] Lock "2e16cbf6-230e-487b-8e0a-d00744f85bf9" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 564.523717] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] Lock "2e16cbf6-230e-487b-8e0a-d00744f85bf9" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 564.545041] env[63379]: INFO dbcounter [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] Registered counter for database nova_cell0
[ 564.552994] env[63379]: INFO dbcounter [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] Registered counter for database nova_cell1
[ 564.556213] env[63379]: DEBUG oslo_db.sqlalchemy.engines [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=63379) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 564.556594] env[63379]: DEBUG oslo_db.sqlalchemy.engines [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=63379) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 564.561369] env[63379]: ERROR nova.db.main.api [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 564.561369] env[63379]: result = function(*args, **kwargs)
[ 564.561369] env[63379]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 564.561369] env[63379]: return func(*args, **kwargs)
[ 564.561369] env[63379]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 564.561369] env[63379]: result = fn(*args, **kwargs)
[ 564.561369] env[63379]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 564.561369] env[63379]: return f(*args, **kwargs)
[ 564.561369] env[63379]: File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 564.561369] env[63379]: return db.service_get_minimum_version(context, binaries)
[ 564.561369] env[63379]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 564.561369] env[63379]: _check_db_access()
[ 564.561369] env[63379]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 564.561369] env[63379]: stacktrace = ''.join(traceback.format_stack())
[ 564.561369] env[63379]:
[ 564.562192] env[63379]: ERROR nova.db.main.api [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 564.562192] env[63379]: result = function(*args, **kwargs)
[ 564.562192] env[63379]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 564.562192] env[63379]: return func(*args, **kwargs)
[ 564.562192] env[63379]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 564.562192] env[63379]: result = fn(*args, **kwargs)
[ 564.562192] env[63379]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 564.562192] env[63379]: return f(*args, **kwargs)
[ 564.562192] env[63379]: File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 564.562192] env[63379]: return db.service_get_minimum_version(context, binaries)
[ 564.562192] env[63379]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 564.562192] env[63379]: _check_db_access()
[ 564.562192] env[63379]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 564.562192] env[63379]: stacktrace = ''.join(traceback.format_stack())
[ 564.562192] env[63379]:
[ 564.562590] env[63379]: WARNING nova.objects.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 564.562711] env[63379]: WARNING nova.objects.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] Failed to get minimum service version for cell 2e16cbf6-230e-487b-8e0a-d00744f85bf9
[ 564.563137] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] Acquiring lock "singleton_lock" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 564.563303] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] Acquired lock "singleton_lock" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [
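Editorial note: the two ERROR blocks above come from a guard in nova.db.main.api that rejects direct database access from the nova-compute process and logs the offending call stack via traceback.format_stack(). A rough, assumption-laden sketch of that guard pattern; the DISABLE_DB_ACCESS flag and DBNotAllowed exception names are illustrative, not necessarily Nova's exact code:

    import logging
    import traceback

    LOG = logging.getLogger(__name__)

    # Hypothetical module-level switch; in nova it is flipped when the process
    # runs as nova-compute so direct DB calls are refused.
    DISABLE_DB_ACCESS = True


    class DBNotAllowed(Exception):
        """Illustrative stand-in for the real 'DB access blocked' exception."""


    def _check_db_access():
        # Mirrors the log entries above: capture and log the caller's stack,
        # then refuse the database call.
        if DISABLE_DB_ACCESS:
            stacktrace = ''.join(traceback.format_stack())
            LOG.error('No DB access allowed in nova-compute: %s', stacktrace)
            raise DBNotAllowed('nova-compute')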
564.563544] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] Releasing lock "singleton_lock" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 564.563864] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] Full set of CONF: {{(pid=63379) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}} [ 564.564015] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] ******************************************************************************** {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2804}} [ 564.564153] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] Configuration options gathered from: {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2805}} [ 564.564315] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 564.564510] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 564.564640] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] ================================================================================ {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 564.564852] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] allow_resize_to_same_host = True {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.565041] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] arq_binding_timeout = 300 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.565184] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] backdoor_port = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.565318] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] backdoor_socket = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.565488] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] block_device_allocate_retries = 60 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.565659] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] block_device_allocate_retries_interval = 3 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.565834] env[63379]: DEBUG 
oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cert = self.pem {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.566011] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.566200] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] compute_monitors = [] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.566376] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] config_dir = [] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.566572] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] config_drive_format = iso9660 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.566719] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.566893] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] config_source = [] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.567079] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] console_host = devstack {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.567255] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] control_exchange = nova {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.567422] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cpu_allocation_ratio = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.567587] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] daemon = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.567759] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] debug = True {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.567921] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] default_access_ip_network_name = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.568104] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] default_availability_zone = nova {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.568268] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] default_ephemeral_format = 
None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.568433] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] default_green_pool_size = 1000 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.568675] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.568845] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] default_schedule_zone = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.569016] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] disk_allocation_ratio = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.569194] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] enable_new_services = True {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.569376] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] enabled_apis = ['osapi_compute'] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.569546] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] enabled_ssl_apis = [] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.569710] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] flat_injected = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.569873] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] force_config_drive = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.570044] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] force_raw_images = True {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.570222] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] graceful_shutdown_timeout = 5 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.570419] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] heal_instance_info_cache_interval = 60 {{(pid=63379) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.570606] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] host = cpu-1 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.570788] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.570955] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] initial_disk_allocation_ratio = 1.0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.571130] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] initial_ram_allocation_ratio = 1.0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.571359] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.571531] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] instance_build_timeout = 0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.571694] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] instance_delete_interval = 300 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.571862] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] instance_format = [instance: %(uuid)s] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.572043] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] instance_name_template = instance-%08x {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.572214] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] instance_usage_audit = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.572389] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] instance_usage_audit_period = month {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.572555] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.572724] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] instances_path = /opt/stack/data/nova/instances {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.572893] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] internal_service_availability_zone = internal {{(pid=63379) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.573065] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] key = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.573232] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] live_migration_retry_count = 30 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.573412] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] log_color = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.573575] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] log_config_append = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.573745] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.573908] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] log_dir = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.574077] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] log_file = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.574238] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] log_options = True {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.574410] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] log_rotate_interval = 1 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.574583] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] log_rotate_interval_type = days {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.574752] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] log_rotation_type = none {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.574883] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.575017] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.575209] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] 
logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.575389] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.575519] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.575685] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] long_rpc_timeout = 1800 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.575849] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] max_concurrent_builds = 10 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.576014] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] max_concurrent_live_migrations = 1 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.576183] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] max_concurrent_snapshots = 5 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.576347] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] max_local_block_devices = 3 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.576516] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] max_logfile_count = 30 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.576705] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] max_logfile_size_mb = 200 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.576873] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] maximum_instance_delete_attempts = 5 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.577055] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] metadata_listen = 0.0.0.0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.577232] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] metadata_listen_port = 8775 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.577408] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] metadata_workers = 2 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.577576] env[63379]: DEBUG oslo_service.service 
[None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] migrate_max_retries = -1 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.577747] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] mkisofs_cmd = genisoimage {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.577958] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] my_block_storage_ip = 10.180.1.21 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.578109] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] my_ip = 10.180.1.21 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.578279] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] network_allocate_retries = 0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.578460] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.578631] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] osapi_compute_listen = 0.0.0.0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.578800] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] osapi_compute_listen_port = 8774 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.578969] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] osapi_compute_unique_server_name_scope = {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.579153] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] osapi_compute_workers = 2 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.579330] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] password_length = 12 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.579499] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] periodic_enable = True {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.579662] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] periodic_fuzzy_delay = 60 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.579834] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] pointer_model = usbtablet {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.580053] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] preallocate_images = none {{(pid=63379) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.580184] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] publish_errors = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.580319] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] pybasedir = /opt/stack/nova {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.580480] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] ram_allocation_ratio = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.580638] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] rate_limit_burst = 0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.580806] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] rate_limit_except_level = CRITICAL {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.580969] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] rate_limit_interval = 0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.581145] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] reboot_timeout = 0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.581309] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] reclaim_instance_interval = 0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.581470] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] record = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.581640] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] reimage_timeout_per_gb = 60 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.581811] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] report_interval = 120 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.581972] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] rescue_timeout = 0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.582146] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] reserved_host_cpus = 0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.582310] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] reserved_host_disk_mb = 0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.582474] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 
None None] reserved_host_memory_mb = 512 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.582635] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] reserved_huge_pages = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.582796] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] resize_confirm_window = 0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.582956] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] resize_fs_using_block_device = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.583127] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] resume_guests_state_on_host_boot = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.583300] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.583466] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] rpc_response_timeout = 60 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.583628] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] run_external_periodic_tasks = True {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.583799] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] running_deleted_instance_action = reap {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.583962] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] running_deleted_instance_poll_interval = 1800 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.584137] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] running_deleted_instance_timeout = 0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.584320] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] scheduler_instance_sync_interval = 120 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.584497] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] service_down_time = 720 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.584667] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] servicegroup_driver = db {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.584826] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] shell_completion = None {{(pid=63379) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.584987] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] shelved_offload_time = 0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.585159] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] shelved_poll_interval = 3600 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.585330] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] shutdown_timeout = 0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.585494] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] source_is_ipv6 = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.585652] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] ssl_only = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.585924] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.586086] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] sync_power_state_interval = 600 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.586256] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] sync_power_state_pool_size = 1000 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.586430] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] syslog_log_facility = LOG_USER {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.586609] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] tempdir = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.586784] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] timeout_nbd = 10 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.586956] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] transport_url = **** {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.587136] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] update_resources_interval = 0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.587301] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] use_cow_images = True {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.587464] env[63379]: DEBUG oslo_service.service [None 
req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] use_eventlog = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.587627] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] use_journal = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.587791] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] use_json = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.587952] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] use_rootwrap_daemon = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.588126] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] use_stderr = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.588290] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] use_syslog = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.588450] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vcpu_pin_set = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.588620] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vif_plugging_is_fatal = True {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.588788] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vif_plugging_timeout = 300 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.588953] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] virt_mkfs = [] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.589130] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] volume_usage_poll_interval = 0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.589296] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] watch_log_file = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.589468] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] web = /usr/share/spice-html5 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 564.589654] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.589823] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=63379) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.589986] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.590171] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_concurrency.disable_process_locking = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.590720] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.590917] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.591107] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.591291] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.591469] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.591640] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.591826] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] api.auth_strategy = keystone {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.591997] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] api.compute_link_prefix = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.592192] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.592372] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] api.dhcp_domain = novalocal {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.592548] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] api.enable_instance_password = True {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.592716] 
env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] api.glance_link_prefix = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.592884] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.593069] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.593240] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] api.instance_list_per_project_cells = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.593407] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] api.list_records_by_skipping_down_cells = True {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.593574] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] api.local_metadata_per_cell = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.593744] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] api.max_limit = 1000 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.593912] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] api.metadata_cache_expiration = 15 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.594099] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] api.neutron_default_tenant_id = default {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.594319] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] api.response_validation = warn {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.594507] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] api.use_neutron_default_nets = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.594681] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.594846] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.595026] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=63379) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.595210] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.595384] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] api.vendordata_dynamic_targets = [] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.595550] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] api.vendordata_jsonfile_path = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.595732] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.595924] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cache.backend = dogpile.cache.memcached {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.596107] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cache.backend_argument = **** {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.596274] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cache.backend_expiration_time = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.596450] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cache.config_prefix = cache.oslo {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.596649] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cache.dead_timeout = 60.0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.596825] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cache.debug_cache_backend = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.596991] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cache.enable_retry_client = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.597175] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cache.enable_socket_keepalive = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.597352] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cache.enabled = True {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.597521] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cache.enforce_fips_mode = False {{(pid=63379) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.597690] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cache.expiration_time = 600 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.597855] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cache.hashclient_retry_attempts = 2 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.598033] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cache.hashclient_retry_delay = 1.0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.598203] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cache.memcache_dead_retry = 300 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.598366] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cache.memcache_password = **** {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.598532] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.598695] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.598858] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cache.memcache_pool_maxsize = 10 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.599030] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.599201] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cache.memcache_sasl_enabled = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.599382] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.599553] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cache.memcache_socket_timeout = 1.0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.599716] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cache.memcache_username = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.599886] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cache.proxies = [] {{(pid=63379) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.600066] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cache.redis_db = 0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.600259] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cache.redis_password = **** {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.600449] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cache.redis_sentinel_service_name = mymaster {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.600631] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.600804] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cache.redis_server = localhost:6379 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.600971] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cache.redis_socket_timeout = 1.0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.601149] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cache.redis_username = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.601319] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cache.retry_attempts = 2 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.601489] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cache.retry_delay = 0.0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.601654] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cache.socket_keepalive_count = 1 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.601818] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cache.socket_keepalive_idle = 1 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.601982] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cache.socket_keepalive_interval = 1 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.602158] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cache.tls_allowed_ciphers = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.602320] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cache.tls_cafile = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.602482] 
env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cache.tls_certfile = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.602645] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cache.tls_enabled = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.602804] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cache.tls_keyfile = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.602974] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cinder.auth_section = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.603165] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cinder.auth_type = password {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.603330] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cinder.cafile = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.603510] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cinder.catalog_info = volumev3::publicURL {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.603674] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cinder.certfile = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.603842] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cinder.collect_timing = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.604015] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cinder.cross_az_attach = True {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.604220] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cinder.debug = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.604391] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cinder.endpoint_template = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.604562] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cinder.http_retries = 3 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.604728] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cinder.insecure = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.604888] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cinder.keyfile = None {{(pid=63379) log_opt_values 
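The [cache] block above boils down to oslo.cache driving dogpile.cache with the memcached backend: cache.backend = dogpile.cache.memcached, cache.memcache_servers = ['localhost:11211'], cache.expiration_time = 600. A rough sketch of an equivalent region built directly against dogpile.cache, the library oslo.cache wraps; it assumes the python-memcached driver is installed and a memcached listening on localhost:11211, and it is not how Nova constructs its cache region:

    from dogpile.cache import make_region

    region = make_region().configure(
        'dogpile.cache.memcached',              # cache.backend
        expiration_time=600,                    # cache.expiration_time (seconds)
        arguments={'url': 'localhost:11211'},   # cache.memcache_servers
    )

    region.set('greeting', 'hello')
    print(region.get('greeting'))   # 'hello', or dogpile's NO_VALUE sentinel on a miss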
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.605076] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cinder.os_region_name = RegionOne {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.605248] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cinder.split_loggers = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.605409] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cinder.timeout = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.605581] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.605745] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] compute.cpu_dedicated_set = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.605905] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] compute.cpu_shared_set = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.606084] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] compute.image_type_exclude_list = [] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.606253] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.606417] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] compute.max_concurrent_disk_ops = 0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.606580] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] compute.max_disk_devices_to_attach = -1 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.606742] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.606910] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.607084] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] compute.resource_provider_association_refresh = 300 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.607250] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] 
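One value in the [cinder] block deserves a note: cinder.catalog_info = volumev3::publicURL is three colon-separated fields, <service_type>:<service_name>:<endpoint_type>, and the empty middle field means no service-name filter when matching the entry in the Keystone service catalog. A tiny illustrative split, not Nova's parsing code:

    # Split the cinder.catalog_info value shown above into its
    # <service_type>:<service_name>:<endpoint_type> fields.
    catalog_info = 'volumev3::publicURL'
    service_type, service_name, endpoint_type = catalog_info.split(':')

    print(service_type)   # 'volumev3'
    print(service_name)   # '' -> no service-name filter
    print(endpoint_type)  # 'publicURL'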
compute.sharing_providers_max_uuids_per_request = 200 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.607414] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] compute.shutdown_retry_interval = 10 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.607593] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.607771] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] conductor.workers = 2 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.607952] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] console.allowed_origins = [] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.608128] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] console.ssl_ciphers = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.608303] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] console.ssl_minimum_version = default {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.608474] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] consoleauth.enforce_session_timeout = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.608641] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] consoleauth.token_ttl = 600 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.608812] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cyborg.cafile = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.608971] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cyborg.certfile = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.609151] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cyborg.collect_timing = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.609311] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cyborg.connect_retries = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.609474] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cyborg.connect_retry_delay = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.609632] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cyborg.endpoint_override = None 
{{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.609794] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cyborg.insecure = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.609952] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cyborg.keyfile = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.610124] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cyborg.max_version = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.610282] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cyborg.min_version = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.610440] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cyborg.region_name = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.610599] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cyborg.retriable_status_codes = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.610759] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cyborg.service_name = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.610928] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cyborg.service_type = accelerator {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.611101] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cyborg.split_loggers = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.611262] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cyborg.status_code_retries = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.611424] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cyborg.status_code_retry_delay = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.611582] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cyborg.timeout = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.611761] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.611921] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] cyborg.version = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
564.612133] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] database.backend = sqlalchemy {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.612324] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] database.connection = **** {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.612498] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] database.connection_debug = 0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.612667] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] database.connection_parameters = {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.612836] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] database.connection_recycle_time = 3600 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.613009] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] database.connection_trace = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.613182] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] database.db_inc_retry_interval = True {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.613350] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] database.db_max_retries = 20 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.613512] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] database.db_max_retry_interval = 10 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.613678] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] database.db_retry_interval = 1 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.613841] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] database.max_overflow = 50 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.614020] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] database.max_pool_size = 5 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.614186] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] database.max_retries = 10 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.614387] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.614562] env[63379]: DEBUG oslo_service.service [None 
req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] database.mysql_wsrep_sync_wait = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.614723] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] database.pool_timeout = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.614886] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] database.retry_interval = 10 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.615057] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] database.slave_connection = **** {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.615228] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] database.sqlite_synchronous = True {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.615393] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] database.use_db_reconnect = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.615572] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] api_database.backend = sqlalchemy {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.615744] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] api_database.connection = **** {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.615910] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] api_database.connection_debug = 0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.616089] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] api_database.connection_parameters = {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.616260] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] api_database.connection_recycle_time = 3600 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.616423] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] api_database.connection_trace = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.616586] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] api_database.db_inc_retry_interval = True {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.616748] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] api_database.db_max_retries = 20 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.616912] env[63379]: DEBUG oslo_service.service [None 
req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] api_database.db_max_retry_interval = 10 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.617082] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] api_database.db_retry_interval = 1 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.617249] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] api_database.max_overflow = 50 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.617415] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] api_database.max_pool_size = 5 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.617578] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] api_database.max_retries = 10 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.617745] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.617904] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.618081] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] api_database.pool_timeout = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.618246] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] api_database.retry_interval = 10 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.618407] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] api_database.slave_connection = **** {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.618569] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] api_database.sqlite_synchronous = True {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.618743] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] devices.enabled_mdev_types = [] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.618919] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.619105] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] ephemeral_storage_encryption.default_format = luks {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.619276] env[63379]: DEBUG oslo_service.service [None 
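Both [database] and [api_database] above run the sqlalchemy backend with identical pool settings: max_pool_size = 5, max_overflow = 50, connection_recycle_time = 3600, and a connection string masked as ****. Nova consumes these through oslo.db, but a bare SQLAlchemy sketch is enough to show what the pool numbers control; the SQLite URL below is only a stand-in for the masked connection value:

    from sqlalchemy import create_engine, text
    from sqlalchemy.pool import QueuePool

    engine = create_engine(
        'sqlite:///sketch.db',   # stand-in; the real database.connection is masked
        poolclass=QueuePool,
        pool_size=5,             # database.max_pool_size
        max_overflow=50,         # database.max_overflow
        pool_recycle=3600,       # database.connection_recycle_time (seconds)
    )

    with engine.connect() as conn:
        print(conn.execute(text('SELECT 1')).scalar())

pool_size caps the connections kept open, max_overflow allows short bursts beyond that cap, and pool_recycle re-opens any connection older than an hour, which guards against server-side idle timeouts.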
req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] ephemeral_storage_encryption.enabled = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.619444] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.619612] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] glance.api_servers = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.619773] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] glance.cafile = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.619935] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] glance.certfile = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.620110] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] glance.collect_timing = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.620270] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] glance.connect_retries = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.620433] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] glance.connect_retry_delay = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.620596] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] glance.debug = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.620761] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] glance.default_trusted_certificate_ids = [] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.620923] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] glance.enable_certificate_validation = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.621097] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] glance.enable_rbd_download = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.621258] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] glance.endpoint_override = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.621424] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] glance.insecure = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.621584] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] glance.keyfile = None 
{{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.621741] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] glance.max_version = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.621895] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] glance.min_version = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.622096] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] glance.num_retries = 3 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.622376] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] glance.rbd_ceph_conf = {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.622609] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] glance.rbd_connect_timeout = 5 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.622795] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] glance.rbd_pool = {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.622970] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] glance.rbd_user = {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.623154] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] glance.region_name = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.623321] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] glance.retriable_status_codes = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.623487] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] glance.service_name = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.623660] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] glance.service_type = image {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.623833] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] glance.split_loggers = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.623995] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] glance.status_code_retries = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.624174] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] glance.status_code_retry_delay = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.624361] env[63379]: DEBUG 
oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] glance.timeout = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.624551] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.624721] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] glance.verify_glance_signatures = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.624884] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] glance.version = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.625065] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] guestfs.debug = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.625237] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] mks.enabled = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.625581] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.625772] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] image_cache.manager_interval = 2400 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.625943] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] image_cache.precache_concurrency = 1 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.626130] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] image_cache.remove_unused_base_images = True {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.626305] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.626477] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.626654] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] image_cache.subdirectory_name = _base {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.626833] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] ironic.api_max_retries = 60 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.627008] env[63379]: DEBUG 
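The [image_cache] block above describes a periodic, age-based sweep: every manager_interval (2400 s) the cache manager looks under the _base subdirectory and treats unused original images older than 86400 s, and unused resized images older than 3600 s, as removal candidates (remove_unused_base_images = True). A toy sketch of that style of age check; it is a simplification, not Nova's cache manager, and the directory path is hypothetical:

    import os
    import time

    BASE_DIR = '/tmp/image_cache/_base'   # hypothetical path; subdirectory_name = _base
    ORIGINAL_MAX_AGE = 86400              # remove_unused_original_minimum_age_seconds


    def stale_base_images(base_dir=BASE_DIR, max_age=ORIGINAL_MAX_AGE):
        """Yield cached files whose mtime is older than max_age seconds."""
        now = time.time()
        if not os.path.isdir(base_dir):
            return
        for name in os.listdir(base_dir):
            path = os.path.join(base_dir, name)
            if os.path.isfile(path) and now - os.path.getmtime(path) > max_age:
                yield path


    for path in stale_base_images():
        print('candidate for removal:', path)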
oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] ironic.api_retry_interval = 2 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.627178] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] ironic.auth_section = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.627341] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] ironic.auth_type = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.627503] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] ironic.cafile = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.627662] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] ironic.certfile = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.627823] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] ironic.collect_timing = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.627984] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] ironic.conductor_group = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.628157] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] ironic.connect_retries = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.628319] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] ironic.connect_retry_delay = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.628481] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] ironic.endpoint_override = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.628644] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] ironic.insecure = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.628800] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] ironic.keyfile = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.628959] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] ironic.max_version = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.629131] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] ironic.min_version = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.629298] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] ironic.peer_list = [] {{(pid=63379) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.629461] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] ironic.region_name = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.629619] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] ironic.retriable_status_codes = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.629781] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] ironic.serial_console_state_timeout = 10 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.629941] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] ironic.service_name = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.630121] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] ironic.service_type = baremetal {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.630284] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] ironic.shard = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.630449] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] ironic.split_loggers = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.630607] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] ironic.status_code_retries = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.630766] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] ironic.status_code_retry_delay = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.630922] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] ironic.timeout = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.631150] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.631272] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] ironic.version = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.631454] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.631628] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] key_manager.fixed_key = **** {{(pid=63379) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.631809] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.631973] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] barbican.barbican_api_version = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.632168] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] barbican.barbican_endpoint = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.632355] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] barbican.barbican_endpoint_type = public {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.632520] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] barbican.barbican_region_name = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.632680] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] barbican.cafile = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.632844] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] barbican.certfile = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.633014] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] barbican.collect_timing = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.633189] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] barbican.insecure = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.633349] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] barbican.keyfile = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.633514] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] barbican.number_of_retries = 60 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.633677] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] barbican.retry_delay = 1 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.633840] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] barbican.send_service_user_token = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.634020] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] barbican.split_loggers = False {{(pid=63379) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.634219] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] barbican.timeout = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.634405] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] barbican.verify_ssl = True {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.634574] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] barbican.verify_ssl_path = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.634743] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] barbican_service_user.auth_section = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.634905] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] barbican_service_user.auth_type = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.635078] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] barbican_service_user.cafile = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.635260] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] barbican_service_user.certfile = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.635439] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] barbican_service_user.collect_timing = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.635602] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] barbican_service_user.insecure = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.635759] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] barbican_service_user.keyfile = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.635922] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] barbican_service_user.split_loggers = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.636092] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] barbican_service_user.timeout = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.636266] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vault.approle_role_id = **** {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.636428] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vault.approle_secret_id = **** {{(pid=63379) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.636599] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vault.kv_mountpoint = secret {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.636758] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vault.kv_path = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.636922] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vault.kv_version = 2 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.637091] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vault.namespace = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.637262] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vault.root_token_id = **** {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.637482] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vault.ssl_ca_crt_file = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.637665] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vault.timeout = 60.0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.637833] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vault.use_ssl = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.638022] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.638202] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] keystone.auth_section = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.638371] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] keystone.auth_type = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.638533] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] keystone.cafile = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.638697] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] keystone.certfile = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.638864] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] keystone.collect_timing = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.639034] env[63379]: DEBUG oslo_service.service [None 
req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] keystone.connect_retries = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.639203] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] keystone.connect_retry_delay = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.639369] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] keystone.endpoint_override = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.639532] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] keystone.insecure = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.639692] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] keystone.keyfile = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.639850] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] keystone.max_version = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.640015] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] keystone.min_version = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.640195] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] keystone.region_name = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.640421] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] keystone.retriable_status_codes = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.640601] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] keystone.service_name = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.640866] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] keystone.service_type = identity {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.641069] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] keystone.split_loggers = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.641262] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] keystone.status_code_retries = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.641404] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] keystone.status_code_retry_delay = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.641569] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] keystone.timeout = None 
{{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.641756] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.641918] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] keystone.version = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.642154] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.connection_uri = {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.642334] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.cpu_mode = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.642507] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.cpu_model_extra_flags = [] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.642678] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.cpu_models = [] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.642851] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.cpu_power_governor_high = performance {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.643032] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.cpu_power_governor_low = powersave {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.643203] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.cpu_power_management = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.643378] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.643541] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.device_detach_attempts = 8 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.643704] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.device_detach_timeout = 20 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.643867] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.disk_cachemodes = [] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.644036] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.disk_prefix = None {{(pid=63379) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.644250] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.enabled_perf_events = [] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.644429] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.file_backed_memory = 0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.644600] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.gid_maps = [] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.644760] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.hw_disk_discard = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.644923] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.hw_machine_type = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.645112] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.images_rbd_ceph_conf = {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.645293] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.645478] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.645656] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.images_rbd_glance_store_name = {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.645831] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.images_rbd_pool = rbd {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.646017] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.images_type = default {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.646184] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.images_volume_group = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.646381] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.inject_key = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.646556] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.inject_partition = -2 {{(pid=63379) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.646720] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.inject_password = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.646885] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.iscsi_iface = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.647065] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.iser_use_multipath = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.647236] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.live_migration_bandwidth = 0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.647403] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.647568] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.live_migration_downtime = 500 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.647733] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.647895] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.648068] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.live_migration_inbound_addr = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.648240] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.648406] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.live_migration_permit_post_copy = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.648568] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.live_migration_scheme = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.648745] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.live_migration_timeout_action = abort {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.648909] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] 
libvirt.live_migration_tunnelled = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.649080] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.live_migration_uri = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.649249] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.live_migration_with_native_tls = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.649413] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.max_queues = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.649575] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.649805] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.649970] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.nfs_mount_options = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.650280] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.650469] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.650637] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.num_iser_scan_tries = 5 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.650809] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.num_memory_encrypted_guests = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.650975] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.651154] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.num_pcie_ports = 0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.651377] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.num_volume_scan_tries = 5 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.651488] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] 
libvirt.pmem_namespaces = [] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.651651] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.quobyte_client_cfg = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.651951] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.652153] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.rbd_connect_timeout = 5 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.652341] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.652512] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.652673] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.rbd_secret_uuid = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.652832] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.rbd_user = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.652995] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.653182] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.remote_filesystem_transport = ssh {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.653345] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.rescue_image_id = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.653504] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.rescue_kernel_id = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.653662] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.rescue_ramdisk_id = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.653833] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.653993] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] 
libvirt.rx_queue_size = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.654178] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.smbfs_mount_options = {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.654494] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.654684] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.snapshot_compression = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.654852] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.snapshot_image_format = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.655097] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.655272] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.sparse_logical_volumes = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.655441] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.swtpm_enabled = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.655612] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.swtpm_group = tss {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.655780] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.swtpm_user = tss {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.655950] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.sysinfo_serial = unique {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.656126] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.tb_cache_size = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.656287] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.tx_queue_size = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.656453] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.uid_maps = [] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.656615] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.use_virtio_for_bridges = 
True {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.656785] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.virt_type = kvm {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.656953] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.volume_clear = zero {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.657129] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.volume_clear_size = 0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.657295] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.volume_use_multipath = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.657455] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.vzstorage_cache_path = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.657623] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.657790] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.vzstorage_mount_group = qemu {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.657955] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.vzstorage_mount_opts = [] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.658135] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.658443] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.658632] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.vzstorage_mount_user = stack {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.658805] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.658987] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] neutron.auth_section = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.659179] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] neutron.auth_type 
= password {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.659346] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] neutron.cafile = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.659507] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] neutron.certfile = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.659671] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] neutron.collect_timing = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.659830] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] neutron.connect_retries = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.659988] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] neutron.connect_retry_delay = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.660172] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] neutron.default_floating_pool = public {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.660335] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] neutron.endpoint_override = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.660500] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] neutron.extension_sync_interval = 600 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.660665] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] neutron.http_retries = 3 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.660828] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] neutron.insecure = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.660988] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] neutron.keyfile = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.661162] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] neutron.max_version = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.661335] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.661495] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] neutron.min_version = None {{(pid=63379) log_opt_values 
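Each <group>.<name> = <value> line in this dump corresponds to an attribute the service reads back as CONF.<group>.<name>. A minimal sketch using two of the libvirt values logged above (libvirt.virt_type = kvm, libvirt.device_detach_timeout = 20); the registration here is illustrative only, Nova's real option definitions live in its own conf package:

from oslo_config import cfg

CONF = cfg.ConfigOpts()
CONF.register_opts(
    [cfg.StrOpt('virt_type', default='kvm'),
     cfg.IntOpt('device_detach_timeout', default=20)],
    group='libvirt',
)
CONF(args=[], default_config_files=[])

# Grouped options are read back as attributes of the group object.
assert CONF.libvirt.virt_type == 'kvm'
assert CONF.libvirt.device_detach_timeout == 20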
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.661664] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] neutron.ovs_bridge = br-int {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.661835] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] neutron.physnets = [] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.662013] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] neutron.region_name = RegionOne {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.662204] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] neutron.retriable_status_codes = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.662384] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] neutron.service_metadata_proxy = True {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.662550] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] neutron.service_name = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.662720] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] neutron.service_type = network {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.662884] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] neutron.split_loggers = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.663058] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] neutron.status_code_retries = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.663224] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] neutron.status_code_retry_delay = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.663388] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] neutron.timeout = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.663583] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.663745] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] neutron.version = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.663919] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] notifications.bdms_in_notifications = False {{(pid=63379) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.664109] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] notifications.default_level = INFO {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.664317] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] notifications.notification_format = unversioned {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.664493] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] notifications.notify_on_state_change = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.664670] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.664848] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] pci.alias = [] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.665027] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] pci.device_spec = [] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.665197] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] pci.report_in_placement = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.665369] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] placement.auth_section = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.665541] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] placement.auth_type = password {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.665708] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.665869] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] placement.cafile = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.666051] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] placement.certfile = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.666246] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] placement.collect_timing = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.666413] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] placement.connect_retries = None {{(pid=63379) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.666575] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] placement.connect_retry_delay = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.666739] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] placement.default_domain_id = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.666898] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] placement.default_domain_name = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.667065] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] placement.domain_id = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.667229] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] placement.domain_name = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.667390] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] placement.endpoint_override = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.667553] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] placement.insecure = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.667711] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] placement.keyfile = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.667869] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] placement.max_version = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.668035] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] placement.min_version = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.668210] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] placement.password = **** {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.668372] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] placement.project_domain_id = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.668544] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] placement.project_domain_name = Default {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.668710] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] placement.project_id = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.668881] 
env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] placement.project_name = service {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.669062] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] placement.region_name = RegionOne {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.669230] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] placement.retriable_status_codes = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.669393] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] placement.service_name = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.669564] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] placement.service_type = placement {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.669729] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] placement.split_loggers = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.669889] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] placement.status_code_retries = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.670078] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] placement.status_code_retry_delay = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.670264] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] placement.system_scope = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.670456] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] placement.timeout = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.670624] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] placement.trust_id = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.670782] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] placement.user_domain_id = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.670951] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] placement.user_domain_name = Default {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.671127] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] placement.user_id = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.671305] env[63379]: DEBUG oslo_service.service [None 
req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] placement.username = nova {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.671488] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.671650] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] placement.version = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.671828] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] quota.cores = 20 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.671992] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] quota.count_usage_from_placement = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.672179] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.672347] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] quota.injected_file_content_bytes = 10240 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.672513] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] quota.injected_file_path_length = 255 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.672678] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] quota.injected_files = 5 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.672842] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] quota.instances = 10 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.673015] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] quota.key_pairs = 100 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.673196] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] quota.metadata_items = 128 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.673365] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] quota.ram = 51200 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.673529] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] quota.recheck_quota = True {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.673697] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] quota.server_group_members = 10 {{(pid=63379) 
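The [placement] values above (auth_type = password, auth_url = http://10.180.1.21/identity, username = nova, project_name = service, Default domains, password masked as ****) describe a standard keystoneauth1 password login. A hedged reconstruction of how such values could be turned into an authenticated session; this is illustrative, not Nova's own code path, and the password literal is a placeholder:

from keystoneauth1 import session
from keystoneauth1.identity import v3

auth = v3.Password(
    auth_url='http://10.180.1.21/identity',
    username='nova',
    password='REDACTED',            # logged above only as ****
    project_name='service',
    user_domain_name='Default',
    project_domain_name='Default',
)
sess = session.Session(auth=auth)

# Requests are routed via the service catalog using the logged service_type,
# region_name and valid_interfaces values.
resp = sess.get('/resource_providers',
                endpoint_filter={'service_type': 'placement',
                                 'region_name': 'RegionOne',
                                 'interface': 'internal'})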
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.673862] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] quota.server_groups = 10 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.674066] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.674281] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.674460] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] scheduler.image_metadata_prefilter = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.674626] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.674789] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] scheduler.max_attempts = 3 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.674953] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] scheduler.max_placement_results = 1000 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.675135] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.675308] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] scheduler.query_placement_for_image_type_support = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.675476] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.675648] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] scheduler.workers = 2 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.675821] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.675993] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.676189] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.676362] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.676535] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.676726] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.676901] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.677110] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.677287] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] filter_scheduler.host_subset_size = 1 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.677457] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.677620] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.677785] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.677950] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] filter_scheduler.isolated_hosts = [] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.678154] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] filter_scheduler.isolated_images = [] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.678333] env[63379]: DEBUG oslo_service.service [None 
req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.678499] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.678664] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.678827] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] filter_scheduler.pci_in_placement = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.678993] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.679175] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.679340] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.679501] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.679679] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.679865] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.680046] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] filter_scheduler.track_instance_changes = True {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.680230] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.680404] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] metrics.required = True {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.680568] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] metrics.weight_multiplier = 1.0 
{{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.680737] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.680897] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] metrics.weight_setting = [] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.681223] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.681402] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] serial_console.enabled = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.681583] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] serial_console.port_range = 10000:20000 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.681759] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.681931] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.682140] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] serial_console.serialproxy_port = 6083 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.682325] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] service_user.auth_section = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.682507] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] service_user.auth_type = password {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.682673] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] service_user.cafile = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.682859] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] service_user.certfile = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.683040] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] service_user.collect_timing = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.683208] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] service_user.insecure = False {{(pid=63379) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.683375] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] service_user.keyfile = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.683547] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] service_user.send_service_user_token = True {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.683724] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] service_user.split_loggers = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.683884] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] service_user.timeout = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.684067] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] spice.agent_enabled = True {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.684253] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] spice.enabled = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.684572] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.684776] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.684951] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] spice.html5proxy_port = 6082 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.685130] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] spice.image_compression = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.685319] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] spice.jpeg_compression = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.685487] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] spice.playback_compression = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.685651] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] spice.require_secure = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.685821] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] spice.server_listen = 127.0.0.1 {{(pid=63379) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.685988] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.686162] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] spice.streaming_mode = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.686323] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] spice.zlib_compression = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.686490] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] upgrade_levels.baseapi = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.686662] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] upgrade_levels.compute = auto {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.686824] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] upgrade_levels.conductor = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.686982] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] upgrade_levels.scheduler = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.687160] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vendordata_dynamic_auth.auth_section = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.687323] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vendordata_dynamic_auth.auth_type = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.687484] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vendordata_dynamic_auth.cafile = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.687643] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vendordata_dynamic_auth.certfile = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.687805] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.687965] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vendordata_dynamic_auth.insecure = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.688137] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vendordata_dynamic_auth.keyfile = None {{(pid=63379) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.688326] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.688493] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vendordata_dynamic_auth.timeout = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.688670] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vmware.api_retry_count = 10 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.688832] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vmware.ca_file = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.689013] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vmware.cache_prefix = devstack-image-cache {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.689192] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vmware.cluster_name = testcl1 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.689363] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vmware.connection_pool_size = 10 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.689522] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vmware.console_delay_seconds = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.689690] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vmware.datastore_regex = ^datastore.* {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.689897] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.690085] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vmware.host_password = **** {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.690257] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vmware.host_port = 443 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.690427] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vmware.host_username = administrator@vsphere.local {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.690597] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vmware.insecure = True {{(pid=63379) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.690758] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vmware.integration_bridge = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.690920] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vmware.maximum_objects = 100 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.691088] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vmware.pbm_default_policy = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.691268] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vmware.pbm_enabled = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.691447] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vmware.pbm_wsdl_location = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.691621] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.691805] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vmware.serial_port_proxy_uri = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.691940] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vmware.serial_port_service_uri = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.692124] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vmware.task_poll_interval = 0.5 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.692303] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vmware.use_linked_clone = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.692473] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vmware.vnc_keymap = en-us {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.692639] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vmware.vnc_port = 5900 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.692801] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vmware.vnc_port_total = 10000 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.692989] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vnc.auth_schemes = ['none'] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.693178] 
env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vnc.enabled = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.693480] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.693668] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.693842] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vnc.novncproxy_port = 6080 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.694050] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vnc.server_listen = 127.0.0.1 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.694267] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.694457] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vnc.vencrypt_ca_certs = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.694625] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vnc.vencrypt_client_cert = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.694789] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vnc.vencrypt_client_key = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.694971] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.695153] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] workarounds.disable_deep_image_inspection = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.695320] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.695481] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.695642] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=63379) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.695805] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] workarounds.disable_rootwrap = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.695966] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] workarounds.enable_numa_live_migration = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.696144] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.696309] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.696472] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.696634] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] workarounds.libvirt_disable_apic = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.696795] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.696958] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.697134] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.697307] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.697488] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.697652] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.697824] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.697989] 
env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.698164] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.698330] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.698518] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.698689] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] wsgi.client_socket_timeout = 900 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.698855] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] wsgi.default_pool_size = 1000 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.699033] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] wsgi.keep_alive = True {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.699206] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] wsgi.max_header_line = 16384 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.699371] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] wsgi.secure_proxy_ssl_header = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.699530] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] wsgi.ssl_ca_file = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.699688] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] wsgi.ssl_cert_file = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.699847] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] wsgi.ssl_key_file = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.700015] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] wsgi.tcp_keepidle = 600 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.700196] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=63379) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.700383] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] zvm.ca_file = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.700561] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] zvm.cloud_connector_url = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.700849] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.701034] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] zvm.reachable_timeout = 300 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.701226] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_policy.enforce_new_defaults = True {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.701601] env[63379]: WARNING oslo_config.cfg [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] Deprecated: Option "enforce_scope" from group "oslo_policy" is deprecated for removal (This configuration was added temporarily to facilitate a smooth transition to the new RBAC. OpenStack will always enforce scope checks. This configuration option is deprecated and will be removed in the 2025.2 cycle.). Its value may be silently ignored in the future. 
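The DEBUG records throughout this dump are produced by oslo.config's ConfigOpts.log_opt_values() (the cfg.py:2824 frame referenced on every line), and the single WARNING above is oslo.config flagging the deprecated [oslo_policy]/enforce_scope option because it is explicitly set in this deployment's configuration. Purely as an illustration, and not the actual nova-compute startup code, here is a minimal Python sketch of how such output is generated with oslo.config; the logger setup, the project name, and the tiny option subset are assumptions, not a reconstruction of Nova's option definitions:

    import logging

    from oslo_config import cfg

    LOG = logging.getLogger(__name__)
    logging.basicConfig(level=logging.DEBUG)

    CONF = cfg.ConfigOpts()

    # Register a small subset of the [oslo_policy] options shown in the log.
    # enforce_scope is marked deprecated-for-removal; oslo.config emits the
    # WARNING seen above when an operator sets it explicitly in a config file.
    policy_opts = [
        cfg.BoolOpt('enforce_new_defaults', default=True),
        cfg.BoolOpt('enforce_scope', default=True,
                    deprecated_for_removal=True,
                    deprecated_reason='Scope checks will always be enforced.'),
        cfg.StrOpt('policy_file', default='policy.yaml'),
    ]
    CONF.register_opts(policy_opts, group='oslo_policy')

    # Parse (no CLI arguments here; config files are discovered via the project
    # name) and dump every registered option at DEBUG level, yielding lines of
    # the form "oslo_policy.enforce_scope = True" like the ones above.
    CONF(args=[], project='nova')
    CONF.log_opt_values(LOG, logging.DEBUG)

In this sketch the deprecation WARNING would only appear if enforce_scope were actually present in a discovered configuration file; with defaults alone, only the DEBUG option dump is logged.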
[ 564.701784] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_policy.enforce_scope = True {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.701960] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_policy.policy_default_rule = default {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.702154] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.702332] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_policy.policy_file = policy.yaml {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.702511] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.702674] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.702835] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.702994] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.703173] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.703351] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_policy.remote_timeout = 60.0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.703540] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.703721] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.703897] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] profiler.connection_string = messaging:// {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.704078] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] profiler.enabled = False {{(pid=63379) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.704271] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] profiler.es_doc_type = notification {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.704445] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] profiler.es_scroll_size = 10000 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.704615] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] profiler.es_scroll_time = 2m {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.704781] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] profiler.filter_error_trace = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.704950] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] profiler.hmac_keys = **** {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.705132] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] profiler.sentinel_service_name = mymaster {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.705301] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] profiler.socket_timeout = 0.1 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.705467] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] profiler.trace_requests = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.705627] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] profiler.trace_sqlalchemy = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.705809] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] profiler_jaeger.process_tags = {} {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.705970] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] profiler_jaeger.service_name_prefix = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.706149] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] profiler_otlp.service_name_prefix = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.706317] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] remote_debug.host = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.706502] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] remote_debug.port = None {{(pid=63379) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.706686] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.706851] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.707024] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.707194] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.707360] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.707524] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.707685] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.707847] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.708015] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.708192] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.708359] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.708532] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.708700] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.708871] 
env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.709051] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.709225] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.709396] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.709591] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.709760] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.709921] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.710097] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.710265] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.710441] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.710589] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.710747] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.710906] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.711077] env[63379]: DEBUG oslo_service.service [None 
req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.711240] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.711411] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.711576] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_messaging_rabbit.ssl = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.711746] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.711915] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.712091] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.712267] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.712458] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_messaging_rabbit.ssl_version = {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.712638] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.712830] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.712999] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_messaging_notifications.retry = -1 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.713198] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.713381] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_messaging_notifications.transport_url = **** {{(pid=63379) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.713559] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_limit.auth_section = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.713723] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_limit.auth_type = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.713884] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_limit.cafile = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.714052] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_limit.certfile = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.714241] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_limit.collect_timing = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.714413] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_limit.connect_retries = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.714573] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_limit.connect_retry_delay = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.714731] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_limit.endpoint_id = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.714902] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_limit.endpoint_interface = publicURL {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.715076] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_limit.endpoint_override = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.715262] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_limit.endpoint_region_name = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.715435] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_limit.endpoint_service_name = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.715595] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_limit.endpoint_service_type = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.715760] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_limit.insecure = False {{(pid=63379) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.715913] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_limit.keyfile = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.716083] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_limit.max_version = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.716242] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_limit.min_version = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.716400] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_limit.region_name = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.716559] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_limit.retriable_status_codes = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.716715] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_limit.service_name = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.716869] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_limit.service_type = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.717038] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_limit.split_loggers = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.717204] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_limit.status_code_retries = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.717361] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_limit.status_code_retry_delay = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.717512] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_limit.timeout = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.717664] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_limit.valid_interfaces = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.717820] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_limit.version = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.717980] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_reports.file_event_handler = None {{(pid=63379) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.718155] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.718335] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] oslo_reports.log_dir = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.718512] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.718671] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.718828] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.718992] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.719172] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.719331] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.719500] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.719656] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vif_plug_ovs_privileged.group = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.719812] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.719973] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.720149] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.720307] env[63379]: DEBUG 
oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] vif_plug_ovs_privileged.user = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.720478] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] os_vif_linux_bridge.flat_interface = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.720655] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.720826] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.720996] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.721181] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.721371] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.721545] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.721709] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.721886] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.722068] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] os_vif_ovs.isolate_vif = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.722243] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.722410] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.722577] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=63379) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.722745] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] os_vif_ovs.ovsdb_interface = native {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.722905] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] os_vif_ovs.per_port_bridge = False {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.723086] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] privsep_osbrick.capabilities = [21] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.723250] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] privsep_osbrick.group = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.723410] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] privsep_osbrick.helper_command = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.723574] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.723741] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.723899] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] privsep_osbrick.user = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.724084] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.724272] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] nova_sys_admin.group = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.724444] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] nova_sys_admin.helper_command = None {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.724611] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.724775] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.724937] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] nova_sys_admin.user = None {{(pid=63379) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 564.725077] env[63379]: DEBUG oslo_service.service [None req-c7128b31-bbda-404c-9990-a47f3ba83019 None None] ******************************************************************************** {{(pid=63379) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2828}} [ 564.725588] env[63379]: INFO nova.service [-] Starting compute node (version 0.1.0) [ 565.228993] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-88efa015-9ac0-4805-97c9-b9be2b311867 None None] Getting list of instances from cluster (obj){ [ 565.228993] env[63379]: value = "domain-c8" [ 565.228993] env[63379]: _type = "ClusterComputeResource" [ 565.228993] env[63379]: } {{(pid=63379) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 565.230185] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f87bbfc8-1c85-4f5c-b83c-71a8f9114984 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.239223] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-88efa015-9ac0-4805-97c9-b9be2b311867 None None] Got total of 0 instances {{(pid=63379) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 565.239743] env[63379]: WARNING nova.virt.vmwareapi.driver [None req-88efa015-9ac0-4805-97c9-b9be2b311867 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 565.240220] env[63379]: INFO nova.virt.node [None req-88efa015-9ac0-4805-97c9-b9be2b311867 None None] Generated node identity cf478c89-515f-4372-b90f-4868ab56e978 [ 565.240459] env[63379]: INFO nova.virt.node [None req-88efa015-9ac0-4805-97c9-b9be2b311867 None None] Wrote node identity cf478c89-515f-4372-b90f-4868ab56e978 to /opt/stack/data/n-cpu-1/compute_id [ 565.743223] env[63379]: WARNING nova.compute.manager [None req-88efa015-9ac0-4805-97c9-b9be2b311867 None None] Compute nodes ['cf478c89-515f-4372-b90f-4868ab56e978'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 566.750069] env[63379]: INFO nova.compute.manager [None req-88efa015-9ac0-4805-97c9-b9be2b311867 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 567.754396] env[63379]: WARNING nova.compute.manager [None req-88efa015-9ac0-4805-97c9-b9be2b311867 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. 
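The long block of "group.option = value" records that ends just above is emitted by oslo.config's ConfigOpts.log_opt_values(), which nova-compute calls once at service start to dump every registered option at DEBUG level. Below is a minimal, self-contained sketch of that mechanism; the option names mirror two [oslo_limit] entries from the dump, but registering them by hand is purely illustrative (in a real deployment the oslo.limit library registers its own options).

    import logging
    from oslo_config import cfg

    logging.basicConfig(level=logging.DEBUG)
    LOG = logging.getLogger(__name__)

    CONF = cfg.ConfigOpts()
    # Illustrative options only; names and defaults taken from the dump above.
    CONF.register_opts(
        [cfg.StrOpt('endpoint_interface', default='publicURL'),
         cfg.BoolOpt('insecure', default=False)],
        group='oslo_limit')

    CONF([], project='nova')                  # parse (empty) command line / config files
    CONF.log_opt_values(LOG, logging.DEBUG)   # emits "oslo_limit.endpoint_interface = publicURL", etc.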
[ 567.754665] env[63379]: DEBUG oslo_concurrency.lockutils [None req-88efa015-9ac0-4805-97c9-b9be2b311867 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 567.754908] env[63379]: DEBUG oslo_concurrency.lockutils [None req-88efa015-9ac0-4805-97c9-b9be2b311867 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 567.755073] env[63379]: DEBUG oslo_concurrency.lockutils [None req-88efa015-9ac0-4805-97c9-b9be2b311867 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 567.755230] env[63379]: DEBUG nova.compute.resource_tracker [None req-88efa015-9ac0-4805-97c9-b9be2b311867 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63379) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 567.757625] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce451aa0-cdfe-47c2-b8a8-dbe58974bf03 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.765492] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e374bb50-cedf-4c9b-9ba3-ca39ee9967d6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.778689] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f708303-94bd-45d5-b395-7533d4563963 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.784877] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4dca00f-ee83-4bec-9c3e-6634b54fd7f7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.813305] env[63379]: DEBUG nova.compute.resource_tracker [None req-88efa015-9ac0-4805-97c9-b9be2b311867 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181389MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=63379) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 567.813305] env[63379]: DEBUG oslo_concurrency.lockutils [None req-88efa015-9ac0-4805-97c9-b9be2b311867 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 567.813474] env[63379]: DEBUG oslo_concurrency.lockutils [None req-88efa015-9ac0-4805-97c9-b9be2b311867 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 568.316390] env[63379]: WARNING 
nova.compute.resource_tracker [None req-88efa015-9ac0-4805-97c9-b9be2b311867 None None] No compute node record for cpu-1:cf478c89-515f-4372-b90f-4868ab56e978: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cf478c89-515f-4372-b90f-4868ab56e978 could not be found. [ 568.820296] env[63379]: INFO nova.compute.resource_tracker [None req-88efa015-9ac0-4805-97c9-b9be2b311867 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: cf478c89-515f-4372-b90f-4868ab56e978 [ 570.328392] env[63379]: DEBUG nova.compute.resource_tracker [None req-88efa015-9ac0-4805-97c9-b9be2b311867 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 570.328733] env[63379]: DEBUG nova.compute.resource_tracker [None req-88efa015-9ac0-4805-97c9-b9be2b311867 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 570.497174] env[63379]: INFO nova.scheduler.client.report [None req-88efa015-9ac0-4805-97c9-b9be2b311867 None None] [req-1ee6c83a-0d12-4645-b143-549090497e98] Created resource provider record via placement API for resource provider with UUID cf478c89-515f-4372-b90f-4868ab56e978 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. [ 570.513879] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e20e2075-2b44-43c7-b897-dbb17c2421d7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.522519] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f30f3133-defd-4448-9325-e90f325a4a88 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.551186] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ab75696-3007-4172-bdc1-09e7436c44aa {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.558206] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-115728e9-879d-4311-b38c-2d9d4781fa48 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.571034] env[63379]: DEBUG nova.compute.provider_tree [None req-88efa015-9ac0-4805-97c9-b9be2b311867 None None] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 571.106688] env[63379]: DEBUG nova.scheduler.client.report [None req-88efa015-9ac0-4805-97c9-b9be2b311867 None None] Updated inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 571.106924] env[63379]: DEBUG nova.compute.provider_tree [None req-88efa015-9ac0-4805-97c9-b9be2b311867 None None] Updating resource provider cf478c89-515f-4372-b90f-4868ab56e978 generation from 0 to 1 during operation: update_inventory {{(pid=63379) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 571.107079] env[63379]: DEBUG nova.compute.provider_tree [None req-88efa015-9ac0-4805-97c9-b9be2b311867 None None] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 571.158215] env[63379]: DEBUG nova.compute.provider_tree [None req-88efa015-9ac0-4805-97c9-b9be2b311867 None None] Updating resource provider cf478c89-515f-4372-b90f-4868ab56e978 generation from 1 to 2 during operation: update_traits {{(pid=63379) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 571.662563] env[63379]: DEBUG nova.compute.resource_tracker [None req-88efa015-9ac0-4805-97c9-b9be2b311867 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63379) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 571.663306] env[63379]: DEBUG oslo_concurrency.lockutils [None req-88efa015-9ac0-4805-97c9-b9be2b311867 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.850s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 571.663616] env[63379]: DEBUG nova.service [None req-88efa015-9ac0-4805-97c9-b9be2b311867 None None] Creating RPC server for service compute {{(pid=63379) start /opt/stack/nova/nova/service.py:186}} [ 571.679227] env[63379]: DEBUG nova.service [None req-88efa015-9ac0-4805-97c9-b9be2b311867 None None] Join ServiceGroup membership for this service compute {{(pid=63379) start /opt/stack/nova/nova/service.py:203}} [ 571.680277] env[63379]: DEBUG nova.servicegroup.drivers.db [None req-88efa015-9ac0-4805-97c9-b9be2b311867 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=63379) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 573.681456] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._sync_power_states {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 574.185346] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-76609179-3ebc-4316-8203-21a64671102b None None] Getting list of instances from cluster (obj){ [ 574.185346] env[63379]: value = 
"domain-c8" [ 574.185346] env[63379]: _type = "ClusterComputeResource" [ 574.185346] env[63379]: } {{(pid=63379) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 574.186490] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a84bbb7-d7c6-4a34-b357-646c95a4ae16 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.195280] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-76609179-3ebc-4316-8203-21a64671102b None None] Got total of 0 instances {{(pid=63379) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 574.195512] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 574.195832] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-76609179-3ebc-4316-8203-21a64671102b None None] Getting list of instances from cluster (obj){ [ 574.195832] env[63379]: value = "domain-c8" [ 574.195832] env[63379]: _type = "ClusterComputeResource" [ 574.195832] env[63379]: } {{(pid=63379) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 574.196699] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a203ce0-017f-4f77-b779-a021300ca432 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.204276] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-76609179-3ebc-4316-8203-21a64671102b None None] Got total of 0 instances {{(pid=63379) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 619.972863] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 619.973278] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 619.973471] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Starting heal instance info cache {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9974}} [ 619.973471] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Rebuilding the list of instances to heal {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9978}} [ 620.479678] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Didn't find any instances for network info cache update. 
{{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10060}} [ 620.479927] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 620.480220] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 620.480417] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 620.480704] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 620.481028] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 620.481358] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 620.481636] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63379) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10593}} [ 620.481889] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager.update_available_resource {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 620.985107] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 620.985493] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 620.985535] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 620.985692] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63379) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 620.986603] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34c6b35f-38de-49c1-9279-36342faaf2c0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.994617] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2a18fd8-c0b3-4ea9-9933-bb6e58578c02 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.007867] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccf7e4f4-98d8-40f9-8652-ae94757fd3d4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.013997] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61891448-3ae0-494e-a76c-3b1e4d58ad9e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.042449] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181391MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=63379) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 621.042602] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 621.042773] 
env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 622.061680] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 622.061985] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 622.075587] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c6929dc-53be-4570-b4d3-daa7dfdf03a1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.083810] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a227820c-7865-4036-9894-dffe6463f431 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.112481] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4d84fdd-3a79-4088-9a42-19ea1e6259d2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.119264] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a037a8e-ab86-4931-b6a1-c4cd0f4e4f93 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.131617] env[63379]: DEBUG nova.compute.provider_tree [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 622.634840] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 623.140837] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63379) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 623.141241] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.098s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 683.127791] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 683.128250] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 683.636023] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 683.636023] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Starting heal instance info cache {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9974}} [ 683.636023] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Rebuilding the list of instances to heal {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9978}} [ 684.135715] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Didn't find any instances for network info cache update. {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10060}} [ 684.136142] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 684.136142] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 684.136262] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 684.136407] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 684.136553] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 684.136697] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63379) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 684.136864] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63379) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10593}} [ 684.137023] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager.update_available_resource {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 684.640742] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 684.640964] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 684.641148] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 684.641305] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63379) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 684.642340] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8921f961-8638-4482-aacb-115432823795 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.651046] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fc0373f-b2b1-42b5-8aae-f39112c91844 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.664722] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa121b6e-1fc6-4ec4-8427-a857ee788bec {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.670753] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e600502-e472-4ad6-a386-9eda77d111ec {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.698852] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181389MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=63379) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 684.698986] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b 
None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 684.699176] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 685.716420] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 685.716699] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 685.729731] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e47beb6-2e94-49b7-8292-1157ab315ffa {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.737510] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-600bd98e-eba6-4aef-a0c6-05983124c526 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.766153] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdc4b106-3807-4a3c-9f71-fc1c16581fe8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.773207] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7557900f-4e91-45c5-81e4-48bc3ac76bb4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.785733] env[63379]: DEBUG nova.compute.provider_tree [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 686.288809] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 686.290103] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63379) _update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 686.290291] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.591s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 746.292270] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 746.292688] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 746.292688] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Starting heal instance info cache {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9974}} [ 746.292812] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Rebuilding the list of instances to heal {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9978}} [ 746.795254] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Didn't find any instances for network info cache update. {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10060}} [ 746.795473] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 746.795652] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 746.795820] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 746.795967] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 746.796124] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 746.796270] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 746.796398] env[63379]: DEBUG 
nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63379) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10593}} [ 746.796535] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager.update_available_resource {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 747.300020] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 747.300430] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 747.300430] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 747.300592] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63379) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 747.301505] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-463ca681-581d-40d0-89de-1d0df638eec9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.309853] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-362a5e13-ea7a-47ac-991e-db37b699fe59 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.323657] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2ef2e97-7d7a-4cc5-96df-578280db76d8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.329635] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-619afe48-c80a-4c4a-bbf3-124dc6edda61 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.361295] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181389MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=63379) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 747.361454] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 747.361615] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 748.379285] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 748.379540] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 748.392300] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e70028c1-bc9d-40b0-8098-beae390d49a2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.399826] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd02a2a2-6a0b-4011-99fe-158d2dfb9922 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.428726] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23b4852d-c137-4f1c-a8ec-fe93ab1bdd14 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.435491] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-879eb0ba-f49b-471e-bea6-7ce16877d9b0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.448067] env[63379]: DEBUG nova.compute.provider_tree [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 748.950715] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 748.952015] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63379) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 
748.952210] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.591s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 803.620708] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 803.621130] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 804.125950] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 804.126131] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Starting heal instance info cache {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9974}} [ 804.126304] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Rebuilding the list of instances to heal {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9978}} [ 804.628970] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Didn't find any instances for network info cache update. 
{{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10060}} [ 804.629423] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 804.629423] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 804.629573] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager.update_available_resource {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 805.132320] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 805.132581] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 805.132719] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 805.132877] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63379) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 805.133837] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a449bc7-4c24-4f44-951c-265a55841fe2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.142275] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71777ee2-f513-4e14-9a1b-812ed7b0fbe8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.155818] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abaf42a7-53f0-4ab6-9b20-74b04444716e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.162052] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9195a8c1-1ced-4ed8-b06e-62f70fbbd400 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.189833] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] 
Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181368MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=63379) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 805.190053] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 805.190202] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 806.207668] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 806.207974] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 806.220373] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a2790b0-7895-49dd-a216-3e84f017e320 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.228614] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f073c24f-0e1e-4515-a1be-0fb75f3844e2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.256635] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e03239c-3938-4a7b-b506-081c11123a77 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.263100] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-def9052d-93e9-40fb-87a5-534973924e84 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.275533] env[63379]: DEBUG nova.compute.provider_tree [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 806.778713] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 806.780016] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63379) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 806.780218] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.590s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 807.114794] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 807.115027] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 807.115193] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 807.115345] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 807.115483] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63379) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10593}} [ 859.964198] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 859.964586] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Cleaning up deleted instances {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11274}} [ 860.467636] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] There are 0 instances to clean {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11283}} [ 860.467874] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 860.468026] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Cleaning up deleted instances with incomplete migration {{(pid=63379) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11312}} [ 860.970780] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 862.473486] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager.update_available_resource {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 862.976291] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 862.976602] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 862.976761] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 862.976923] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63379) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 862.977817] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f23b44c5-d93b-4dec-aac0-1032c3d9e276 {{(pid=63379) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.985961] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3df20c7-1a71-46da-9c54-8ea264fa5cec {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.000183] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad7faaaf-f642-4325-936d-daef2f64842a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.006303] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aef46d7-07f2-4d42-a45a-24e922be47f4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.033912] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181382MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=63379) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 863.034054] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 863.034242] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 864.052308] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 864.052574] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 864.067065] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a6de138-7417-4697-9672-fe06325b00c4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.074359] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71ee856b-ecba-4871-b849-61bb755354e0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.102767] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91583576-16aa-4e59-9c0e-617ef50db8b8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.109769] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-2bce8963-faf7-4194-b863-cbc61b27766d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.122165] env[63379]: DEBUG nova.compute.provider_tree [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 864.625717] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 864.627072] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63379) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 864.627259] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.593s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 866.113717] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 866.115195] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 866.115195] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Starting heal instance info cache {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9974}} [ 866.115195] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Rebuilding the list of instances to heal {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9978}} [ 866.617938] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Didn't find any instances for network info cache update. 
{{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10060}} [ 866.618313] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 866.618533] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 866.618723] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 866.618882] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 866.619048] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 866.619188] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63379) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10593}} [ 867.964941] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 920.960615] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 921.465468] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager.update_available_resource {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 921.968674] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 921.969067] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 921.969253] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 921.969338] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63379) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 921.970808] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f655795-5850-47d9-9a2d-d0cb2be328cf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.979591] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-765c1320-eb3c-4b01-a26c-477db3b9e285 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.993707] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6880188-f688-4d50-8805-4bdc1a7c671a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.000163] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68d67e00-176a-41df-b0d0-34c657a8ebb2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.028587] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181370MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=63379) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 922.028746] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 922.028927] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 923.061403] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 923.061645] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 923.078680] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Refreshing inventories for resource provider cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) 
_refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 923.092578] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Updating ProviderTree inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 923.092751] env[63379]: DEBUG nova.compute.provider_tree [None req-76609179-3ebc-4316-8203-21a64671102b None None] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 923.104332] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Refreshing aggregate associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, aggregates: None {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 923.118952] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Refreshing trait associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 923.129599] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-635154be-d3de-46a9-99ae-4f841617af85 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.136673] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b8286e4-fcfb-4360-9e3a-aec93db8fcc6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.166099] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2f28fee-7d2a-4b07-8614-96f59f10439c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.172953] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a201e6b-9c44-45c7-996f-cd5c8012373d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.185441] env[63379]: DEBUG nova.compute.provider_tree [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 923.689087] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 923.690393] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63379) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 923.690567] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.662s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 926.690274] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 926.690878] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 926.691046] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Starting heal instance info cache {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9974}} [ 926.691172] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Rebuilding the list of instances to heal {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9978}} [ 927.194757] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Didn't find any instances for network info cache update. 
{{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10060}} [ 927.195053] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 927.195316] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 927.195316] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 927.195482] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 927.195592] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63379) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10593}} [ 927.964064] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 927.964399] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 980.964480] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager.update_available_resource {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 981.467566] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 981.467847] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 981.468031] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 981.468200] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b 
None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63379) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 981.469139] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c14ec001-4283-495f-afdf-b967b4cc819a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.478428] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42e4b4da-805c-47e8-ae79-5ca262d76119 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.492317] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54fd12ea-b5bf-45ba-be7b-a7c44cfb9f09 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.498443] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ea5311d-4a21-4a04-9eec-4fe2826588ef {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.526283] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181390MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=63379) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 981.526418] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 981.526601] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 982.547138] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 982.547367] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 982.559906] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9131ec0e-8974-4b5e-9b42-c6babffca0e4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.567114] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-717c0822-ddcf-4da0-a509-8d2217ab30e5 {{(pid=63379) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.596075] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aa3a871-bf07-4088-af5c-ef26468b8f49 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.602837] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d93584a-1858-474e-a822-e8a40e7a3516 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.615291] env[63379]: DEBUG nova.compute.provider_tree [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 983.118707] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 983.120050] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63379) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 983.120232] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.594s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 987.115862] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 987.116466] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 987.116466] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Starting heal instance info cache {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9974}} [ 987.116466] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Rebuilding the list of instances to heal {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9978}} [ 987.619323] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Didn't find any instances for network info cache update. 
{{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10060}} [ 987.619699] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 987.620086] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 987.620280] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63379) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10593}} [ 987.964942] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 987.965201] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 987.965375] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 988.964598] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1041.964186] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager.update_available_resource {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1042.467935] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1042.468204] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1042.468373] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.468527] env[63379]: DEBUG nova.compute.resource_tracker [None 
req-76609179-3ebc-4316-8203-21a64671102b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63379) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1042.469455] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47803392-7477-4ffd-a2f4-50c0600d0e2a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.477786] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46585be0-2829-485d-94ee-fc6fb240fb3e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.492267] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d21492b5-dddc-444e-8402-694a4998d74a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.498118] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c40f356-89a8-4903-a077-9245876c9fde {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.525487] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181389MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=63379) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1042.525621] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1042.525797] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1043.542683] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1043.542910] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1043.555898] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5259fad5-c8c7-44b9-9007-81bc98690093 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.563525] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12c05005-2352-4f88-bd20-ba871d1b05bb {{(pid=63379) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.593389] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f93de2af-7df2-4335-ac1f-cefd0c80c714 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.600255] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98244e26-a028-490c-bae2-6dc6a330d6e3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.612887] env[63379]: DEBUG nova.compute.provider_tree [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1044.115986] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1044.117275] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63379) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1044.117457] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.592s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1048.113168] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1048.113544] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1048.617763] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1048.617959] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Starting heal instance info cache {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9974}} [ 1048.618114] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Rebuilding the list of instances to heal {{(pid=63379) 
_heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9978}} [ 1049.120906] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Didn't find any instances for network info cache update. {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10060}} [ 1049.121277] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1049.121412] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1049.121514] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1049.121659] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1049.121790] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63379) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10593}} [ 1049.964564] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1050.964319] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1103.965270] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager.update_available_resource {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1104.468922] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1104.469210] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1104.469371] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1104.469495] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63379) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1104.470430] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d40bb7c-908b-479e-856d-83760ae3cac8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.478589] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90b70e39-00cf-4b12-a153-8ead0845bde9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.493603] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab671096-bf86-41ab-8dbd-562112d3ff12 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.499881] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f7bfed5-a1e3-40d2-ae37-dc78215a5111 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.528018] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181380MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=63379) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1104.528183] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1104.528353] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1105.547009] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1105.547268] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1105.560023] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-223f4f98-e84a-47d4-a051-d880961f455f {{(pid=63379) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.567689] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a99aef3b-6753-4f6c-a677-a144c422320c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.597187] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50d4aa12-c19b-4c81-b025-0be9cb02d998 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.604369] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d14891a-3220-4111-8a07-160cab7d7590 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.617097] env[63379]: DEBUG nova.compute.provider_tree [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1106.120162] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1106.121556] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63379) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1106.121735] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.593s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1109.116702] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1109.118558] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1109.118558] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Starting heal instance info cache {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9974}} [ 1109.118558] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Rebuilding the list of instances to heal {{(pid=63379) _heal_instance_info_cache 
/opt/stack/nova/nova/compute/manager.py:9978}} [ 1109.620022] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Didn't find any instances for network info cache update. {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10060}} [ 1109.620262] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1109.620422] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1109.620572] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1109.620701] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63379) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10593}} [ 1109.964553] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1110.964286] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1111.964198] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1163.965808] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager.update_available_resource {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1164.469550] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1164.469819] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1164.469988] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: 
held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1164.470161] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63379) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1164.471070] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cb1a42f-07ad-4031-8553-62b6e8d170e4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.479173] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41592736-a0bf-44fe-9da9-6492f9fe4ff3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.493364] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb739639-0405-4b91-aea5-488125ec4600 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.499186] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-756808e4-d174-4208-aec3-9b3adaa066a0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.528041] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181392MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=63379) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1164.528224] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1164.528442] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1165.546463] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1165.546723] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1165.559544] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2949e173-b2ec-4007-b7f5-61454b2a94b6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.566954] 
env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47101126-efeb-4351-90bd-00b6065943fe {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.595513] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe6c8b9d-428f-4e98-913e-23d627fd8b92 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.602543] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17cad13d-100a-47fd-8cd6-94986dc4076f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.615507] env[63379]: DEBUG nova.compute.provider_tree [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1166.118932] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1166.120275] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63379) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1166.120453] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.592s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1169.115349] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1169.115740] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1169.622619] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1169.622619] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Starting heal instance info cache {{(pid=63379) _heal_instance_info_cache 
/opt/stack/nova/nova/compute/manager.py:9974}} [ 1169.622868] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Rebuilding the list of instances to heal {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9978}} [ 1170.125061] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Didn't find any instances for network info cache update. {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10060}} [ 1170.125422] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1170.125422] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1170.125543] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1170.125677] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63379) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10593}} [ 1170.964824] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1171.964173] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1171.964583] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1171.964639] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Cleaning up deleted instances with incomplete migration {{(pid=63379) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11312}} [ 1172.965152] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1174.186341] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1174.186662] env[63379]: DEBUG oslo_service.periodic_task [None 
req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._sync_power_states {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1174.689328] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-76609179-3ebc-4316-8203-21a64671102b None None] Getting list of instances from cluster (obj){ [ 1174.689328] env[63379]: value = "domain-c8" [ 1174.689328] env[63379]: _type = "ClusterComputeResource" [ 1174.689328] env[63379]: } {{(pid=63379) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1174.690401] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-694a1265-d02a-4adc-b38a-6a1ad2013d18 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.698815] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-76609179-3ebc-4316-8203-21a64671102b None None] Got total of 0 instances {{(pid=63379) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1174.699081] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1174.699281] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Cleaning up deleted instances {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11274}} [ 1175.202086] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] There are 0 instances to clean {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11283}} [ 1225.467916] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager.update_available_resource {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1225.971412] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1225.971686] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1225.971822] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1225.971973] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63379) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1225.972948] env[63379]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66cd00a2-de8f-4737-a718-90f759bfddcc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.981196] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cda206a-eeaf-4440-af2b-7aa5364ce71e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.995351] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25e380df-4a15-4bbe-9df2-227b9ae731e5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.001171] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c17607e-db38-4e8d-a27d-4ec0fff965cc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.029739] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181387MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=63379) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1226.029871] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1226.030078] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1227.143744] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1227.144022] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1227.159139] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Refreshing inventories for resource provider cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1227.169773] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Updating ProviderTree inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 
1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1227.169938] env[63379]: DEBUG nova.compute.provider_tree [None req-76609179-3ebc-4316-8203-21a64671102b None None] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1227.179045] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Refreshing aggregate associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, aggregates: None {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1227.192363] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Refreshing trait associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1227.202553] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15764c34-5dd6-441c-a445-f4bf780044ae {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.209648] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c089586-f5b0-44a8-8af7-ea3b616bfd14 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.238606] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fd3e80f-fedd-4171-a23b-42664f7cbe70 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.245075] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2e916d6-c5f2-424b-b4da-dff506c64d87 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.257397] env[63379]: DEBUG nova.compute.provider_tree [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1227.760693] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1227.761949] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63379) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1227.762146] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.732s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1230.258759] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1230.259132] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1230.259180] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Starting heal instance info cache {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9974}} [ 1230.259299] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Rebuilding the list of instances to heal {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9978}} [ 1230.762043] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Didn't find any instances for network info cache update. {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10060}} [ 1230.762043] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1230.762265] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1230.762265] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63379) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10593}} [ 1230.964965] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1232.964673] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1232.965129] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1235.966239] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1284.965064] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager.update_available_resource {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1285.468472] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1285.468712] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1285.468887] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1285.469056] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63379) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1285.469975] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9724560a-3b94-46aa-af73-bc9fe1d95681 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.478049] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e54bcf60-ccd0-468e-9acb-024bb4bbfc7c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.491363] env[63379]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e2ff14c-53cd-4d40-8efb-9bd0dd202a65 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.497252] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a91ee45-c01a-4503-a7b5-da721e69576d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.526275] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181377MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=63379) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1285.526422] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1285.526593] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1286.544572] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1286.544817] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1286.557885] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-549bfcac-c441-4e51-9929-f6e482f0becf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.565741] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c47594eb-500a-4394-94e3-c0e8b6c18ed1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.594302] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b58fe24a-1ace-4808-adb6-678f8d09f831 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.600830] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5fa81a4-38d7-4633-9c99-a263e3ed1efb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.613827] env[63379]: DEBUG nova.compute.provider_tree [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1287.116725] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1287.118050] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63379) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1287.118267] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.592s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1291.118063] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1291.118504] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1291.624115] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1291.624115] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Starting heal instance info cache {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9974}} [ 1291.624115] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Rebuilding the list of instances to heal {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9978}} [ 1292.126622] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Didn't find any instances for network info cache update. 
{{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10060}} [ 1292.126989] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1292.127056] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1292.127188] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1292.127322] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63379) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10593}} [ 1293.964581] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1293.964956] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1295.964479] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1346.964934] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager.update_available_resource {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1347.467483] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1347.467766] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1347.468028] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1347.468104] env[63379]: DEBUG nova.compute.resource_tracker [None 
req-76609179-3ebc-4316-8203-21a64671102b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63379) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1347.469151] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3307c6c-3841-4eb1-8aed-99ebbc189d8d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.483976] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79f70ab7-8de6-4103-9fe6-fcfd32be55d7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.504366] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10ff887b-b5c8-466e-9a22-3b75bf30f22b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.512514] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0c62d4c-97fd-48ae-98af-4711527576fb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.545516] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181379MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=63379) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1347.545698] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1347.545940] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1348.565871] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1348.566149] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1348.586197] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf5765d8-61e0-462f-881e-a424c9dc012c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.594305] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dd9e784-0ded-486f-97bc-b024c7280d4d {{(pid=63379) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.630706] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f693fee2-25b3-46e0-a02d-1a6fa03bf4a0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.638586] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-045c84bf-4e44-45a0-88ce-99cddeaacc19 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.653471] env[63379]: DEBUG nova.compute.provider_tree [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1349.160163] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1349.161649] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63379) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1349.161649] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.616s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1351.163597] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1351.163904] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1351.164245] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Starting heal instance info cache {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9974}} [ 1351.164245] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Rebuilding the list of instances to heal {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9978}} [ 1351.669316] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Didn't find any instances for network info cache update. 
{{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10060}} [ 1351.670133] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1351.670133] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1351.670133] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63379) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10593}} [ 1352.966158] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1354.365664] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquiring lock "0aab61e4-c055-4872-973a-20fa6802ec10" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1354.365979] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "0aab61e4-c055-4872-973a-20fa6802ec10" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1354.871491] env[63379]: DEBUG nova.compute.manager [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Starting instance... 
{{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1354.965939] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1355.268209] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Acquiring lock "724c7a22-1833-4dc5-ab38-a11498a83ab8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1355.268423] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Lock "724c7a22-1833-4dc5-ab38-a11498a83ab8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1355.419228] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1355.419228] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1355.419228] env[63379]: INFO nova.compute.claims [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1355.771428] env[63379]: DEBUG nova.compute.manager [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Starting instance... 
{{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1355.964117] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1356.262331] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Acquiring lock "c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1356.262683] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Lock "c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1356.296168] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Acquiring lock "efc5b3b6-bed4-484c-8a0c-65810747382d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1356.296168] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Lock "efc5b3b6-bed4-484c-8a0c-65810747382d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1356.300220] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1356.332117] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Acquiring lock "0edadcca-042e-440b-985b-6338e20265fa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1356.332310] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Lock "0edadcca-042e-440b-985b-6338e20265fa" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1356.539022] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23ba1be2-495b-4794-b4b4-67a3d3a0e8c5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.548678] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c0a45e5-c4f7-4712-be6a-89a10edb564f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.581886] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e59f10f-b4a4-42b1-8144-e762e2f18941 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.589149] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0106d726-dcb0-4a46-b55c-cbc6427a2097 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.604097] env[63379]: DEBUG nova.compute.provider_tree [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1356.766350] env[63379]: DEBUG nova.compute.manager [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1356.802765] env[63379]: DEBUG nova.compute.manager [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1356.835731] env[63379]: DEBUG nova.compute.manager [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Starting instance... 
{{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1357.102731] env[63379]: DEBUG oslo_concurrency.lockutils [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Acquiring lock "a6f7c217-a493-403d-b776-870df4575f2a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1357.103065] env[63379]: DEBUG oslo_concurrency.lockutils [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Lock "a6f7c217-a493-403d-b776-870df4575f2a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1357.109339] env[63379]: DEBUG nova.scheduler.client.report [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1357.296273] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1357.322343] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1357.362551] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1357.605442] env[63379]: DEBUG nova.compute.manager [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Starting instance... 
{{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1357.613986] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.196s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1357.614503] env[63379]: DEBUG nova.compute.manager [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1357.617252] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.317s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1357.618965] env[63379]: INFO nova.compute.claims [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1357.849609] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Acquiring lock "55fb6899-0321-4bf2-bf3f-2e87dd479433" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1357.849866] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Lock "55fb6899-0321-4bf2-bf3f-2e87dd479433" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1357.965736] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1358.124545] env[63379]: DEBUG nova.compute.utils [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1358.129409] env[63379]: DEBUG nova.compute.manager [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1358.129757] env[63379]: DEBUG nova.network.neutron [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1358.140245] env[63379]: DEBUG oslo_concurrency.lockutils [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1358.352457] env[63379]: DEBUG nova.compute.manager [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1358.644030] env[63379]: DEBUG nova.policy [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9162483675d540dfb8551206627b50e7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '767980ba969142098ccbdf031f6e62a9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1358.647610] env[63379]: DEBUG nova.compute.manager [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Start building block device mappings for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1358.832045] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6313e923-b707-4577-8550-907d9a38a585 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.848117] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cc72f9d-4a95-474a-98c4-4ed5c38713f9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.901222] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1358.902381] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ce63c34-4d66-440e-a61b-464379157190 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.916316] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33529b5c-b43c-4f52-819d-915c34485137 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.935678] env[63379]: DEBUG nova.compute.provider_tree [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1359.375730] env[63379]: DEBUG nova.network.neutron [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Successfully created port: 129136be-c7bf-454e-a408-37372aa8bfd9 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1359.441072] env[63379]: DEBUG nova.scheduler.client.report [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1359.665797] env[63379]: DEBUG nova.compute.manager [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1359.710800] env[63379]: DEBUG nova.virt.hardware [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1359.711073] env[63379]: DEBUG nova.virt.hardware [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1359.711230] env[63379]: DEBUG nova.virt.hardware [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1359.711405] env[63379]: DEBUG nova.virt.hardware [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1359.711544] env[63379]: DEBUG nova.virt.hardware [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1359.711734] env[63379]: DEBUG nova.virt.hardware [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1359.715018] env[63379]: DEBUG nova.virt.hardware [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1359.715018] env[63379]: DEBUG nova.virt.hardware [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1359.715018] env[63379]: DEBUG nova.virt.hardware [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Got 1 
possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1359.715018] env[63379]: DEBUG nova.virt.hardware [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1359.715018] env[63379]: DEBUG nova.virt.hardware [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1359.715018] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a25da98b-2795-4098-9cd4-64b00180c8a8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.725186] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4320122d-4bc8-4251-9350-d5725112895b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.750677] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d8be5a4-3ea3-414d-b846-4416518dd848 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.947149] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.330s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1359.948058] env[63379]: DEBUG nova.compute.manager [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1359.950854] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.656s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1359.955145] env[63379]: INFO nova.compute.claims [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1360.460389] env[63379]: DEBUG nova.compute.utils [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1360.465259] env[63379]: DEBUG nova.compute.manager [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1360.465365] env[63379]: DEBUG nova.network.neutron [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1360.558057] env[63379]: DEBUG nova.policy [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '94d8206fe6ba48a7bb8c69b6ef6e43de', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e27d08147c804c0a8e9aee3a7ac5851f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1360.809656] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Acquiring lock "30908171-e1b9-4e20-830e-419ff6d9a0fa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1360.809943] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Lock "30908171-e1b9-4e20-830e-419ff6d9a0fa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1360.971609] env[63379]: DEBUG nova.compute.manager [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1361.108833] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0fd1709-6608-408e-8c24-1b7d13c77290 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.118057] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d42b7bc9-ab2c-4d5f-8198-7606db017009 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.151525] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-657fcd7b-57e5-4a54-b6eb-8a283cf424b6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.164027] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26756ba0-3037-4d77-a7aa-d99bd9c9e829 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.177599] env[63379]: DEBUG nova.compute.provider_tree [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1361.312210] env[63379]: DEBUG nova.compute.manager [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Starting instance... 
{{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1361.344194] env[63379]: DEBUG nova.network.neutron [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Successfully created port: 47acb26e-647c-4d9a-bcfd-7c9ea5cf9846 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1361.684029] env[63379]: DEBUG nova.scheduler.client.report [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1361.846326] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1361.983927] env[63379]: DEBUG nova.compute.manager [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1362.013174] env[63379]: DEBUG nova.virt.hardware [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1362.013438] env[63379]: DEBUG nova.virt.hardware [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1362.013593] env[63379]: DEBUG nova.virt.hardware [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1362.013797] env[63379]: DEBUG nova.virt.hardware [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1362.013943] env[63379]: DEBUG nova.virt.hardware [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1362.014710] env[63379]: DEBUG nova.virt.hardware [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1362.014932] env[63379]: DEBUG nova.virt.hardware [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1362.015116] env[63379]: DEBUG nova.virt.hardware [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1362.015318] env[63379]: DEBUG nova.virt.hardware [None 
req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1362.015483] env[63379]: DEBUG nova.virt.hardware [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1362.015655] env[63379]: DEBUG nova.virt.hardware [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1362.016605] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67e11dad-f42b-41a5-bbbd-232c878a9fbe {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.027906] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7be1b0e-2fd2-47d3-abad-1af64e1b3f97 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.187730] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.237s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1362.188279] env[63379]: DEBUG nova.compute.manager [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1362.192746] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.871s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1362.195840] env[63379]: INFO nova.compute.claims [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1362.708273] env[63379]: DEBUG nova.compute.utils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1362.713842] env[63379]: DEBUG nova.compute.manager [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1362.714052] env[63379]: DEBUG nova.network.neutron [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1362.812974] env[63379]: DEBUG nova.policy [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8fe8b981270b4c9d8d937026615dec0a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1f2e46a9dbd64c68a27219215a0c0b6a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1363.214430] env[63379]: DEBUG nova.compute.manager [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] Start building block device mappings for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1363.387872] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce1f4d7e-52c0-4bab-a8c5-d82df55a2c83 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.399125] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c78ca35-d4f1-4021-9d95-482063dbd0d8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.445362] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3ba5d52-a08d-4d40-a91f-1599d3c20612 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.457944] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e75c764-2a2e-499d-9f92-dfc73f6ea92b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.480291] env[63379]: DEBUG nova.compute.provider_tree [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1363.672063] env[63379]: DEBUG nova.network.neutron [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] Successfully created port: 01b61007-ef4f-4f75-871c-33a30b49ecf4 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1363.692393] env[63379]: DEBUG nova.network.neutron [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Successfully updated port: 129136be-c7bf-454e-a408-37372aa8bfd9 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1363.987916] env[63379]: DEBUG nova.scheduler.client.report [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1364.065331] env[63379]: DEBUG nova.network.neutron [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Successfully updated port: 47acb26e-647c-4d9a-bcfd-7c9ea5cf9846 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1364.195099] env[63379]: DEBUG oslo_concurrency.lockutils [None 
req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquiring lock "refresh_cache-0aab61e4-c055-4872-973a-20fa6802ec10" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1364.195293] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquired lock "refresh_cache-0aab61e4-c055-4872-973a-20fa6802ec10" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1364.195442] env[63379]: DEBUG nova.network.neutron [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1364.231713] env[63379]: DEBUG nova.compute.manager [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1364.251892] env[63379]: DEBUG nova.compute.manager [req-5c862f70-8fb7-4dad-8777-0f26d2ca6fce req-47972223-8a2a-4a14-b2ca-b5aabfa32880 service nova] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Received event network-vif-plugged-129136be-c7bf-454e-a408-37372aa8bfd9 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1364.251892] env[63379]: DEBUG oslo_concurrency.lockutils [req-5c862f70-8fb7-4dad-8777-0f26d2ca6fce req-47972223-8a2a-4a14-b2ca-b5aabfa32880 service nova] Acquiring lock "0aab61e4-c055-4872-973a-20fa6802ec10-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1364.251892] env[63379]: DEBUG oslo_concurrency.lockutils [req-5c862f70-8fb7-4dad-8777-0f26d2ca6fce req-47972223-8a2a-4a14-b2ca-b5aabfa32880 service nova] Lock "0aab61e4-c055-4872-973a-20fa6802ec10-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1364.251892] env[63379]: DEBUG oslo_concurrency.lockutils [req-5c862f70-8fb7-4dad-8777-0f26d2ca6fce req-47972223-8a2a-4a14-b2ca-b5aabfa32880 service nova] Lock "0aab61e4-c055-4872-973a-20fa6802ec10-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1364.251892] env[63379]: DEBUG nova.compute.manager [req-5c862f70-8fb7-4dad-8777-0f26d2ca6fce req-47972223-8a2a-4a14-b2ca-b5aabfa32880 service nova] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] No waiting events found dispatching network-vif-plugged-129136be-c7bf-454e-a408-37372aa8bfd9 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1364.252693] env[63379]: WARNING nova.compute.manager [req-5c862f70-8fb7-4dad-8777-0f26d2ca6fce req-47972223-8a2a-4a14-b2ca-b5aabfa32880 service nova] [instance: 
0aab61e4-c055-4872-973a-20fa6802ec10] Received unexpected event network-vif-plugged-129136be-c7bf-454e-a408-37372aa8bfd9 for instance with vm_state building and task_state spawning. [ 1364.263180] env[63379]: DEBUG nova.virt.hardware [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1364.263371] env[63379]: DEBUG nova.virt.hardware [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1364.263522] env[63379]: DEBUG nova.virt.hardware [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1364.263733] env[63379]: DEBUG nova.virt.hardware [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1364.263883] env[63379]: DEBUG nova.virt.hardware [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1364.264035] env[63379]: DEBUG nova.virt.hardware [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1364.264240] env[63379]: DEBUG nova.virt.hardware [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1364.264391] env[63379]: DEBUG nova.virt.hardware [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 
tempest-ListServersNegativeTestJSON-1342451698-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1364.264548] env[63379]: DEBUG nova.virt.hardware [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1364.264711] env[63379]: DEBUG nova.virt.hardware [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1364.264895] env[63379]: DEBUG nova.virt.hardware [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1364.266385] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f049fabd-a00f-439e-a542-b671700e617f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.276842] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e43dab3-85c4-4293-b6e5-1c2dea6ace6d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.494386] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.302s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1364.495623] env[63379]: DEBUG nova.compute.manager [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1364.501166] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.137s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1364.501166] env[63379]: INFO nova.compute.claims [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1364.569886] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Acquiring lock "refresh_cache-724c7a22-1833-4dc5-ab38-a11498a83ab8" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1364.570207] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Acquired lock "refresh_cache-724c7a22-1833-4dc5-ab38-a11498a83ab8" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1364.570470] env[63379]: DEBUG nova.network.neutron [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1364.764903] env[63379]: DEBUG nova.network.neutron [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1365.012387] env[63379]: DEBUG nova.compute.utils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1365.014491] env[63379]: DEBUG nova.compute.manager [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1365.014491] env[63379]: DEBUG nova.network.neutron [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1365.020678] env[63379]: DEBUG nova.network.neutron [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Updating instance_info_cache with network_info: [{"id": "129136be-c7bf-454e-a408-37372aa8bfd9", "address": "fa:16:3e:ee:ce:e6", "network": {"id": "0f1c71c4-9a40-4d5f-9ce7-b2e38109b1f5", "bridge": "br-int", "label": "tempest-ImagesTestJSON-969152574-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "767980ba969142098ccbdf031f6e62a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0636c3f6-fcb7-4954-ab07-c5cd0dee37b0", "external-id": "nsx-vlan-transportzone-857", "segmentation_id": 857, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap129136be-c7", "ovs_interfaceid": "129136be-c7bf-454e-a408-37372aa8bfd9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1365.115353] env[63379]: DEBUG nova.policy [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8fe8b981270b4c9d8d937026615dec0a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1f2e46a9dbd64c68a27219215a0c0b6a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1365.143933] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Acquiring lock "aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1365.144144] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Lock "aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1365.145912] env[63379]: DEBUG nova.network.neutron [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1365.406598] env[63379]: DEBUG nova.network.neutron [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Updating instance_info_cache with network_info: [{"id": "47acb26e-647c-4d9a-bcfd-7c9ea5cf9846", "address": "fa:16:3e:d8:43:69", "network": {"id": "55f3848c-4d4f-4c83-a3e6-bc7a6f7af3ce", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.53", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eb95d75934bc4912a35f709406a98a65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47acb26e-64", "ovs_interfaceid": "47acb26e-647c-4d9a-bcfd-7c9ea5cf9846", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1365.518807] env[63379]: DEBUG nova.compute.manager [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Start building block device mappings for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1365.525168] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Releasing lock "refresh_cache-0aab61e4-c055-4872-973a-20fa6802ec10" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1365.525168] env[63379]: DEBUG nova.compute.manager [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Instance network_info: |[{"id": "129136be-c7bf-454e-a408-37372aa8bfd9", "address": "fa:16:3e:ee:ce:e6", "network": {"id": "0f1c71c4-9a40-4d5f-9ce7-b2e38109b1f5", "bridge": "br-int", "label": "tempest-ImagesTestJSON-969152574-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "767980ba969142098ccbdf031f6e62a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0636c3f6-fcb7-4954-ab07-c5cd0dee37b0", "external-id": "nsx-vlan-transportzone-857", "segmentation_id": 857, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap129136be-c7", "ovs_interfaceid": "129136be-c7bf-454e-a408-37372aa8bfd9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1365.527082] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ee:ce:e6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0636c3f6-fcb7-4954-ab07-c5cd0dee37b0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '129136be-c7bf-454e-a408-37372aa8bfd9', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1365.541321] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1365.541790] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-97c5a0c3-6ab2-4279-a736-d839fbbcad00 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.557028] env[63379]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. 
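Note: the "Instance VIF info" entry above is derived mechanically from one element of the Neutron network_info cache logged a few lines earlier. A minimal sketch of that mapping follows; the helper name is illustrative and this is not nova's actual vmwareapi code, which also handles non-NSX networks and other VIF models.

def vif_info_from_network_info(vif, vif_model="vmxnet3"):
    # Map one network_info entry (as logged by update_instance_cache_with_nw_info)
    # to the flat VIF-info dict reported by the build_virtual_machine line above.
    details = vif["details"]
    return {
        "network_name": vif["network"]["bridge"],        # "br-int" in the cache above
        "mac_address": vif["address"],                   # e.g. "fa:16:3e:ee:ce:e6"
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": details["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],
        "vif_model": vif_model,
    }

Applied to the single entry cached for instance 0aab61e4-c055-4872-973a-20fa6802ec10, this reproduces the dictionary shown in the "Instance VIF info" line.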
[ 1365.557081] env[63379]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=63379) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1365.557790] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Folder already exists: OpenStack. Parent ref: group-v4. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1365.557986] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Creating folder: Project (767980ba969142098ccbdf031f6e62a9). Parent ref: group-v369214. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1365.558348] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7af15a33-c6d4-4dab-a3bc-3a1ca2f2d227 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.569396] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Created folder: Project (767980ba969142098ccbdf031f6e62a9) in parent group-v369214. [ 1365.569590] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Creating folder: Instances. Parent ref: group-v369218. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1365.569816] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-58d22f99-a20a-4851-aa2f-5b6f9142e147 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.581213] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Created folder: Instances in parent group-v369218. [ 1365.581468] env[63379]: DEBUG oslo.service.loopingcall [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1365.581686] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1365.582162] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-47b65746-f3a7-45a8-b931-d31080bad9a2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.605323] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1365.605323] env[63379]: value = "task-1778871" [ 1365.605323] env[63379]: _type = "Task" [ 1365.605323] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.613562] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778871, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.649900] env[63379]: DEBUG nova.compute.manager [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1365.708983] env[63379]: DEBUG nova.network.neutron [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Successfully created port: 120116f2-2c59-4c67-b5ec-2aad96939540 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1365.752598] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0145d580-e4cd-498d-b23f-d0cb9b8ddd29 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.762806] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-608d406e-4250-4d84-be40-5174bcfb1a0d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.802829] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b921272d-8c0f-49c9-aec4-437d35936e18 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.810788] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9e79544-35a4-4ac5-8438-6e85ab931b14 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.824836] env[63379]: DEBUG nova.compute.provider_tree [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1365.910563] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Releasing lock "refresh_cache-724c7a22-1833-4dc5-ab38-a11498a83ab8" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1365.910920] env[63379]: DEBUG nova.compute.manager [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Instance network_info: |[{"id": "47acb26e-647c-4d9a-bcfd-7c9ea5cf9846", "address": "fa:16:3e:d8:43:69", "network": {"id": "55f3848c-4d4f-4c83-a3e6-bc7a6f7af3ce", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": 
"192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.53", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eb95d75934bc4912a35f709406a98a65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47acb26e-64", "ovs_interfaceid": "47acb26e-647c-4d9a-bcfd-7c9ea5cf9846", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1365.911377] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d8:43:69', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea00b53a-9c9b-4592-ab95-7e10473f338d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '47acb26e-647c-4d9a-bcfd-7c9ea5cf9846', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1365.919253] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Creating folder: Project (e27d08147c804c0a8e9aee3a7ac5851f). Parent ref: group-v369214. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1365.919591] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1bbf2860-1ebc-4e82-876a-5ad9385640c1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.931477] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Created folder: Project (e27d08147c804c0a8e9aee3a7ac5851f) in parent group-v369214. [ 1365.931697] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Creating folder: Instances. Parent ref: group-v369221. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1365.931996] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a7510e67-d426-4215-8d8f-5e41dea58a62 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.941469] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Created folder: Instances in parent group-v369221. 
[ 1365.941706] env[63379]: DEBUG oslo.service.loopingcall [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1365.941906] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1365.942157] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4b6fc67d-7b27-4692-811c-e3e5d74ed18c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.961790] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1365.961790] env[63379]: value = "task-1778874" [ 1365.961790] env[63379]: _type = "Task" [ 1365.961790] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.970646] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778874, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.058999] env[63379]: DEBUG nova.network.neutron [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] Successfully updated port: 01b61007-ef4f-4f75-871c-33a30b49ecf4 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1366.116691] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778871, 'name': CreateVM_Task, 'duration_secs': 0.391445} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1366.117298] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1366.139017] env[63379]: DEBUG oslo_vmware.service [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fada8276-3e28-4dc1-81b3-10aef30ec7ff {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.147078] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1366.147078] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1366.147415] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1366.147960] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67a80178-008f-4716-b965-56d09cc159bc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.153932] env[63379]: DEBUG oslo_vmware.api [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1366.153932] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b63613-7bff-6266-1728-dda7e93b021b" [ 1366.153932] env[63379]: _type = "Task" [ 1366.153932] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1366.173156] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1366.173416] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1366.173665] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1366.173829] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1366.174283] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1366.174941] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d4cc7db4-1084-4739-95ed-6f2cac2aa176 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.181801] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1366.195373] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1366.195373] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1366.195373] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50616ab0-ea21-4a4f-85e2-f8f286443f3d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.205079] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c4e7598-1bb9-4c37-bcf2-a3ffe8bd237d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.214075] env[63379]: DEBUG oslo_vmware.api [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1366.214075] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b07778-b81c-bcb2-a3b3-e17a611381ac" [ 1366.214075] env[63379]: _type = "Task" [ 1366.214075] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1366.225395] env[63379]: DEBUG oslo_vmware.api [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b07778-b81c-bcb2-a3b3-e17a611381ac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.346646] env[63379]: ERROR nova.scheduler.client.report [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [req-82e292d2-decc-4567-aece-fb5b7854bede] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID cf478c89-515f-4372-b90f-4868ab56e978. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-82e292d2-decc-4567-aece-fb5b7854bede"}]} [ 1366.367058] env[63379]: DEBUG nova.scheduler.client.report [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Refreshing inventories for resource provider cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1366.381708] env[63379]: DEBUG nova.scheduler.client.report [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Updating ProviderTree inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1366.381708] env[63379]: DEBUG nova.compute.provider_tree [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1366.396074] env[63379]: DEBUG nova.scheduler.client.report [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Refreshing aggregate associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, aggregates: None {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1366.416128] env[63379]: DEBUG nova.scheduler.client.report [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Refreshing trait associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1366.472725] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778874, 'name': CreateVM_Task, 'duration_secs': 0.400073} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1366.473624] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1366.476090] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1366.476090] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1366.476090] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1366.476090] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a11f0ef-861f-4ec4-a247-56fdb013da93 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.480474] env[63379]: DEBUG oslo_vmware.api [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Waiting for the task: (returnval){ [ 1366.480474] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52be000a-d9ed-6437-6cab-67918aa8ca7a" [ 1366.480474] env[63379]: _type = "Task" [ 1366.480474] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1366.492102] env[63379]: DEBUG oslo_vmware.api [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52be000a-d9ed-6437-6cab-67918aa8ca7a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.533896] env[63379]: DEBUG nova.compute.manager [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1366.559873] env[63379]: DEBUG nova.virt.hardware [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1366.560240] env[63379]: DEBUG nova.virt.hardware [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1366.561271] env[63379]: DEBUG nova.virt.hardware [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1366.561475] env[63379]: DEBUG nova.virt.hardware [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1366.561667] env[63379]: DEBUG nova.virt.hardware [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1366.562891] env[63379]: DEBUG nova.virt.hardware [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1366.562891] env[63379]: DEBUG nova.virt.hardware [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1366.562891] env[63379]: DEBUG nova.virt.hardware [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1366.562891] env[63379]: DEBUG nova.virt.hardware [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1366.562891] env[63379]: DEBUG nova.virt.hardware [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1366.563104] env[63379]: DEBUG nova.virt.hardware [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1366.563357] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Acquiring lock "refresh_cache-c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1366.563505] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Acquired lock "refresh_cache-c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1366.563633] env[63379]: DEBUG nova.network.neutron [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1366.566104] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54dd165f-8560-4952-9d86-b3d97fc70711 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.569583] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6b94745-176d-4560-9cda-fa3224d4b3f2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.585632] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc533972-2f1a-4da3-9d47-68c60193107b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.594531] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86c91680-c118-444e-9790-fb1c3c2f8bdc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.609348] env[63379]: DEBUG nova.compute.manager [req-28a77397-e67d-4af5-a6d4-deb7bcc6f5fb req-dc30f1ee-8c9f-4eea-bd43-8fe2f9ca5a84 service nova] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] 
Received event network-vif-plugged-01b61007-ef4f-4f75-871c-33a30b49ecf4 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1366.609814] env[63379]: DEBUG oslo_concurrency.lockutils [req-28a77397-e67d-4af5-a6d4-deb7bcc6f5fb req-dc30f1ee-8c9f-4eea-bd43-8fe2f9ca5a84 service nova] Acquiring lock "c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1366.610113] env[63379]: DEBUG oslo_concurrency.lockutils [req-28a77397-e67d-4af5-a6d4-deb7bcc6f5fb req-dc30f1ee-8c9f-4eea-bd43-8fe2f9ca5a84 service nova] Lock "c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1366.610375] env[63379]: DEBUG oslo_concurrency.lockutils [req-28a77397-e67d-4af5-a6d4-deb7bcc6f5fb req-dc30f1ee-8c9f-4eea-bd43-8fe2f9ca5a84 service nova] Lock "c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1366.610680] env[63379]: DEBUG nova.compute.manager [req-28a77397-e67d-4af5-a6d4-deb7bcc6f5fb req-dc30f1ee-8c9f-4eea-bd43-8fe2f9ca5a84 service nova] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] No waiting events found dispatching network-vif-plugged-01b61007-ef4f-4f75-871c-33a30b49ecf4 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1366.610921] env[63379]: WARNING nova.compute.manager [req-28a77397-e67d-4af5-a6d4-deb7bcc6f5fb req-dc30f1ee-8c9f-4eea-bd43-8fe2f9ca5a84 service nova] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] Received unexpected event network-vif-plugged-01b61007-ef4f-4f75-871c-33a30b49ecf4 for instance with vm_state building and task_state spawning. 
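Note: the network-vif-plugged sequence above (pop_instance_event, "No waiting events found", then the WARNING about an unexpected event) amounts to matching an incoming Neutron notification against per-instance waiters; when nothing is waiting yet because the instance is still spawning, the event is only logged. A simplified, hypothetical sketch with illustrative names, not Nova's API:

import threading
from collections import defaultdict


class InstanceEventWaiters:
    def __init__(self):
        self._lock = threading.Lock()
        # instance_uuid -> {event_name: threading.Event registered by a waiter}
        self._waiters = defaultdict(dict)

    def prepare(self, instance_uuid, event_name):
        """Register interest in an event and return an object to wait on."""
        event = threading.Event()
        with self._lock:
            self._waiters[instance_uuid][event_name] = event
        return event

    def pop_and_signal(self, instance_uuid, event_name):
        """Signal a registered waiter; return False for unexpected events."""
        with self._lock:
            event = self._waiters[instance_uuid].pop(event_name, None)
        if event is None:
            # Mirrors the WARNING above: the VIF was plugged before the driver
            # started waiting for it, so there is nothing to dispatch yet.
            return False
        event.set()
        return True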
[ 1366.641807] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dea2b11-6e1e-4d44-82b2-1474df09eaad {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.650933] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42c2aa02-9b2c-4af2-a82b-b9710df137f6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.667668] env[63379]: DEBUG nova.compute.provider_tree [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1366.725549] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Preparing fetch location {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1366.728019] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Creating directory with path [datastore1] vmware_temp/76353f89-cc3e-4cf5-9b34-ec671eb00558/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1366.728019] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-57e317f7-b2c0-4e4d-b2e1-4f4d0249bd7d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.738441] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Created directory with path [datastore1] vmware_temp/76353f89-cc3e-4cf5-9b34-ec671eb00558/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1366.738644] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Fetch image to [datastore1] vmware_temp/76353f89-cc3e-4cf5-9b34-ec671eb00558/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/tmp-sparse.vmdk {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1366.738853] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Downloading image file data d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 to [datastore1] 
vmware_temp/76353f89-cc3e-4cf5-9b34-ec671eb00558/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/tmp-sparse.vmdk on the data store datastore1 {{(pid=63379) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1366.739696] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e965c9fa-da21-434e-a4f5-d93a44c0889c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.748341] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-144083df-68d7-471e-8f6b-4a9659257c57 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.759212] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7985225-7481-4e4a-893e-0914a39878f3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.796539] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4207a9d8-c287-4a17-9222-564f47df1d3a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.804088] env[63379]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7f62303d-8960-415a-b536-05075bf28388 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.832343] env[63379]: DEBUG nova.virt.vmwareapi.images [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Downloading image file data d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 to the data store datastore1 {{(pid=63379) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1366.901148] env[63379]: DEBUG oslo_vmware.rw_handles [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/76353f89-cc3e-4cf5-9b34-ec671eb00558/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=63379) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1366.993033] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1366.993420] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1366.993534] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1367.164145] env[63379]: DEBUG nova.network.neutron [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1367.207208] env[63379]: DEBUG nova.scheduler.client.report [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Updated inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 with generation 13 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1367.207479] env[63379]: DEBUG nova.compute.provider_tree [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Updating resource provider cf478c89-515f-4372-b90f-4868ab56e978 generation from 13 to 14 during operation: update_inventory {{(pid=63379) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1367.207869] env[63379]: DEBUG nova.compute.provider_tree [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': 
{'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1367.491300] env[63379]: DEBUG nova.compute.manager [req-b405266f-d928-4d47-b2dd-af4ceecbc6a6 req-2d19c7b5-32c2-4c5a-8e1f-1e0ed33781f3 service nova] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Received event network-changed-129136be-c7bf-454e-a408-37372aa8bfd9 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1367.491805] env[63379]: DEBUG nova.compute.manager [req-b405266f-d928-4d47-b2dd-af4ceecbc6a6 req-2d19c7b5-32c2-4c5a-8e1f-1e0ed33781f3 service nova] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Refreshing instance network info cache due to event network-changed-129136be-c7bf-454e-a408-37372aa8bfd9. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1367.492044] env[63379]: DEBUG oslo_concurrency.lockutils [req-b405266f-d928-4d47-b2dd-af4ceecbc6a6 req-2d19c7b5-32c2-4c5a-8e1f-1e0ed33781f3 service nova] Acquiring lock "refresh_cache-0aab61e4-c055-4872-973a-20fa6802ec10" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1367.492198] env[63379]: DEBUG oslo_concurrency.lockutils [req-b405266f-d928-4d47-b2dd-af4ceecbc6a6 req-2d19c7b5-32c2-4c5a-8e1f-1e0ed33781f3 service nova] Acquired lock "refresh_cache-0aab61e4-c055-4872-973a-20fa6802ec10" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1367.492399] env[63379]: DEBUG nova.network.neutron [req-b405266f-d928-4d47-b2dd-af4ceecbc6a6 req-2d19c7b5-32c2-4c5a-8e1f-1e0ed33781f3 service nova] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Refreshing network info cache for port 129136be-c7bf-454e-a408-37372aa8bfd9 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1367.545377] env[63379]: DEBUG nova.network.neutron [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] Updating instance_info_cache with network_info: [{"id": "01b61007-ef4f-4f75-871c-33a30b49ecf4", "address": "fa:16:3e:d3:b6:bf", "network": {"id": "c54d2090-56c1-4e69-85b5-a46ac3777f04", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-607216821-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f2e46a9dbd64c68a27219215a0c0b6a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4020f51-6e46-4b73-a79e-9fe3fd51b917", "external-id": "nsx-vlan-transportzone-16", "segmentation_id": 16, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01b61007-ef", "ovs_interfaceid": "01b61007-ef4f-4f75-871c-33a30b49ecf4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1367.557713] 
env[63379]: DEBUG nova.network.neutron [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Successfully updated port: 120116f2-2c59-4c67-b5ec-2aad96939540 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1367.572068] env[63379]: DEBUG oslo_vmware.rw_handles [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Completed reading data from the image iterator. {{(pid=63379) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1367.572343] env[63379]: DEBUG oslo_vmware.rw_handles [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/76353f89-cc3e-4cf5-9b34-ec671eb00558/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=63379) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1367.714730] env[63379]: DEBUG nova.virt.vmwareapi.images [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Downloaded image file data d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 to vmware_temp/76353f89-cc3e-4cf5-9b34-ec671eb00558/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/tmp-sparse.vmdk on the data store datastore1 {{(pid=63379) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1367.719687] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Caching image {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1367.719861] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Copying Virtual Disk [datastore1] vmware_temp/76353f89-cc3e-4cf5-9b34-ec671eb00558/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/tmp-sparse.vmdk to [datastore1] vmware_temp/76353f89-cc3e-4cf5-9b34-ec671eb00558/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1367.720717] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.222s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1367.727279] env[63379]: DEBUG nova.compute.manager [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1367.734084] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a4708e3d-1639-4ac9-a91e-a8c51d97db73 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.743527] env[63379]: DEBUG oslo_concurrency.lockutils [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.603s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1367.745197] env[63379]: INFO nova.compute.claims [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1367.763180] env[63379]: DEBUG oslo_vmware.api [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1367.763180] env[63379]: value = "task-1778876" [ 1367.763180] env[63379]: _type = "Task" [ 1367.763180] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1367.772757] env[63379]: DEBUG oslo_vmware.api [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1778876, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.051662] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Releasing lock "refresh_cache-c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1368.052065] env[63379]: DEBUG nova.compute.manager [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] Instance network_info: |[{"id": "01b61007-ef4f-4f75-871c-33a30b49ecf4", "address": "fa:16:3e:d3:b6:bf", "network": {"id": "c54d2090-56c1-4e69-85b5-a46ac3777f04", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-607216821-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f2e46a9dbd64c68a27219215a0c0b6a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4020f51-6e46-4b73-a79e-9fe3fd51b917", "external-id": "nsx-vlan-transportzone-16", "segmentation_id": 16, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01b61007-ef", "ovs_interfaceid": "01b61007-ef4f-4f75-871c-33a30b49ecf4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1368.052518] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d3:b6:bf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd4020f51-6e46-4b73-a79e-9fe3fd51b917', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '01b61007-ef4f-4f75-871c-33a30b49ecf4', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1368.060698] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Creating folder: Project (1f2e46a9dbd64c68a27219215a0c0b6a). Parent ref: group-v369214. 
{{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1368.061295] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Acquiring lock "refresh_cache-efc5b3b6-bed4-484c-8a0c-65810747382d" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1368.061402] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Acquired lock "refresh_cache-efc5b3b6-bed4-484c-8a0c-65810747382d" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1368.061617] env[63379]: DEBUG nova.network.neutron [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1368.062610] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2c380633-28d5-42be-85e2-b68cc70e3577 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.079466] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Created folder: Project (1f2e46a9dbd64c68a27219215a0c0b6a) in parent group-v369214. [ 1368.079699] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Creating folder: Instances. Parent ref: group-v369224. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1368.079942] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4f62c12a-eb64-4f3f-8454-3c69ae875362 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.091568] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Created folder: Instances in parent group-v369224. [ 1368.091867] env[63379]: DEBUG oslo.service.loopingcall [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1368.092073] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1368.092308] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9d0c746a-e9f9-461b-9f2e-575dd97168fe {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.117143] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1368.117143] env[63379]: value = "task-1778879" [ 1368.117143] env[63379]: _type = "Task" [ 1368.117143] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1368.124579] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778879, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.246241] env[63379]: DEBUG nova.compute.utils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1368.253830] env[63379]: DEBUG nova.compute.manager [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1368.254258] env[63379]: DEBUG nova.network.neutron [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: 0edadcca-042e-440b-985b-6338e20265fa] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1368.285216] env[63379]: DEBUG oslo_vmware.api [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1778876, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.366265] env[63379]: DEBUG nova.policy [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8fe8b981270b4c9d8d937026615dec0a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1f2e46a9dbd64c68a27219215a0c0b6a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1368.398015] env[63379]: DEBUG nova.network.neutron [req-b405266f-d928-4d47-b2dd-af4ceecbc6a6 req-2d19c7b5-32c2-4c5a-8e1f-1e0ed33781f3 service nova] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Updated VIF entry in instance network info cache for port 129136be-c7bf-454e-a408-37372aa8bfd9. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1368.398456] env[63379]: DEBUG nova.network.neutron [req-b405266f-d928-4d47-b2dd-af4ceecbc6a6 req-2d19c7b5-32c2-4c5a-8e1f-1e0ed33781f3 service nova] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Updating instance_info_cache with network_info: [{"id": "129136be-c7bf-454e-a408-37372aa8bfd9", "address": "fa:16:3e:ee:ce:e6", "network": {"id": "0f1c71c4-9a40-4d5f-9ce7-b2e38109b1f5", "bridge": "br-int", "label": "tempest-ImagesTestJSON-969152574-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "767980ba969142098ccbdf031f6e62a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0636c3f6-fcb7-4954-ab07-c5cd0dee37b0", "external-id": "nsx-vlan-transportzone-857", "segmentation_id": 857, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap129136be-c7", "ovs_interfaceid": "129136be-c7bf-454e-a408-37372aa8bfd9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1368.605156] env[63379]: DEBUG nova.network.neutron [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1368.636304] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778879, 'name': CreateVM_Task, 'duration_secs': 0.482193} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1368.636870] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1368.637265] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1368.637870] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1368.637949] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1368.638426] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e68833c3-e298-41c1-a166-edf98c11b891 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.645817] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Waiting for the task: (returnval){ [ 1368.645817] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e55d3e-1fc1-4549-f637-02c673e6bb82" [ 1368.645817] env[63379]: _type = "Task" [ 1368.645817] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1368.654325] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e55d3e-1fc1-4549-f637-02c673e6bb82, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.752555] env[63379]: DEBUG nova.compute.manager [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1368.779751] env[63379]: DEBUG oslo_vmware.api [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1778876, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.778413} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1368.779751] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Copied Virtual Disk [datastore1] vmware_temp/76353f89-cc3e-4cf5-9b34-ec671eb00558/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/tmp-sparse.vmdk to [datastore1] vmware_temp/76353f89-cc3e-4cf5-9b34-ec671eb00558/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1368.779751] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Deleting the datastore file [datastore1] vmware_temp/76353f89-cc3e-4cf5-9b34-ec671eb00558/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/tmp-sparse.vmdk {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1368.779751] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-540dd20e-9d8d-470f-a528-df13674f0f08 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.791028] env[63379]: DEBUG oslo_vmware.api [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1368.791028] env[63379]: value = "task-1778880" [ 1368.791028] env[63379]: _type = "Task" [ 1368.791028] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1368.802266] env[63379]: DEBUG oslo_vmware.api [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1778880, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.852550] env[63379]: DEBUG nova.network.neutron [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Successfully created port: fc0b60ef-5c6d-4d2a-9318-02840aeb7595 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1368.867290] env[63379]: DEBUG nova.network.neutron [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Updating instance_info_cache with network_info: [{"id": "120116f2-2c59-4c67-b5ec-2aad96939540", "address": "fa:16:3e:5b:1b:d6", "network": {"id": "c54d2090-56c1-4e69-85b5-a46ac3777f04", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-607216821-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f2e46a9dbd64c68a27219215a0c0b6a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4020f51-6e46-4b73-a79e-9fe3fd51b917", "external-id": "nsx-vlan-transportzone-16", "segmentation_id": 16, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap120116f2-2c", "ovs_interfaceid": "120116f2-2c59-4c67-b5ec-2aad96939540", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1368.903054] env[63379]: DEBUG oslo_concurrency.lockutils [req-b405266f-d928-4d47-b2dd-af4ceecbc6a6 req-2d19c7b5-32c2-4c5a-8e1f-1e0ed33781f3 service nova] Releasing lock "refresh_cache-0aab61e4-c055-4872-973a-20fa6802ec10" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1368.903327] env[63379]: DEBUG nova.compute.manager [req-b405266f-d928-4d47-b2dd-af4ceecbc6a6 req-2d19c7b5-32c2-4c5a-8e1f-1e0ed33781f3 service nova] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Received event network-vif-plugged-47acb26e-647c-4d9a-bcfd-7c9ea5cf9846 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1368.903519] env[63379]: DEBUG oslo_concurrency.lockutils [req-b405266f-d928-4d47-b2dd-af4ceecbc6a6 req-2d19c7b5-32c2-4c5a-8e1f-1e0ed33781f3 service nova] Acquiring lock "724c7a22-1833-4dc5-ab38-a11498a83ab8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1368.903892] env[63379]: DEBUG oslo_concurrency.lockutils [req-b405266f-d928-4d47-b2dd-af4ceecbc6a6 req-2d19c7b5-32c2-4c5a-8e1f-1e0ed33781f3 service nova] Lock "724c7a22-1833-4dc5-ab38-a11498a83ab8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1368.903892] env[63379]: DEBUG oslo_concurrency.lockutils [req-b405266f-d928-4d47-b2dd-af4ceecbc6a6 req-2d19c7b5-32c2-4c5a-8e1f-1e0ed33781f3 service nova] Lock "724c7a22-1833-4dc5-ab38-a11498a83ab8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1368.904079] env[63379]: DEBUG nova.compute.manager [req-b405266f-d928-4d47-b2dd-af4ceecbc6a6 req-2d19c7b5-32c2-4c5a-8e1f-1e0ed33781f3 service nova] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] No waiting events found dispatching network-vif-plugged-47acb26e-647c-4d9a-bcfd-7c9ea5cf9846 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1368.904243] env[63379]: WARNING nova.compute.manager [req-b405266f-d928-4d47-b2dd-af4ceecbc6a6 req-2d19c7b5-32c2-4c5a-8e1f-1e0ed33781f3 service nova] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Received unexpected event network-vif-plugged-47acb26e-647c-4d9a-bcfd-7c9ea5cf9846 for instance with vm_state building and task_state spawning. [ 1368.904531] env[63379]: DEBUG nova.compute.manager [req-b405266f-d928-4d47-b2dd-af4ceecbc6a6 req-2d19c7b5-32c2-4c5a-8e1f-1e0ed33781f3 service nova] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Received event network-changed-47acb26e-647c-4d9a-bcfd-7c9ea5cf9846 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1368.904531] env[63379]: DEBUG nova.compute.manager [req-b405266f-d928-4d47-b2dd-af4ceecbc6a6 req-2d19c7b5-32c2-4c5a-8e1f-1e0ed33781f3 service nova] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Refreshing instance network info cache due to event network-changed-47acb26e-647c-4d9a-bcfd-7c9ea5cf9846. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1368.904747] env[63379]: DEBUG oslo_concurrency.lockutils [req-b405266f-d928-4d47-b2dd-af4ceecbc6a6 req-2d19c7b5-32c2-4c5a-8e1f-1e0ed33781f3 service nova] Acquiring lock "refresh_cache-724c7a22-1833-4dc5-ab38-a11498a83ab8" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1368.904994] env[63379]: DEBUG oslo_concurrency.lockutils [req-b405266f-d928-4d47-b2dd-af4ceecbc6a6 req-2d19c7b5-32c2-4c5a-8e1f-1e0ed33781f3 service nova] Acquired lock "refresh_cache-724c7a22-1833-4dc5-ab38-a11498a83ab8" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1368.905068] env[63379]: DEBUG nova.network.neutron [req-b405266f-d928-4d47-b2dd-af4ceecbc6a6 req-2d19c7b5-32c2-4c5a-8e1f-1e0ed33781f3 service nova] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Refreshing network info cache for port 47acb26e-647c-4d9a-bcfd-7c9ea5cf9846 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1368.954220] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dde146d-7b91-4bc1-86c3-7fa665bd69aa {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.962900] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8d2dfbc-b862-428c-99d5-08d6f589e4bd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.994770] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6649b0b-4272-4f68-900a-827ec8175a26 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.004101] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ef90e30-eaa1-42e2-a741-0341d5d0d6f9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.019580] env[63379]: DEBUG nova.compute.provider_tree [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1369.155876] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1369.156339] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1369.156498] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 
tempest-ListServersNegativeTestJSON-1342451698-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1369.304338] env[63379]: DEBUG oslo_vmware.api [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1778880, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.036251} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1369.304581] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1369.304807] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Moving file from [datastore1] vmware_temp/76353f89-cc3e-4cf5-9b34-ec671eb00558/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 to [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48. {{(pid=63379) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 1369.305156] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-b1b1d028-3f7c-4419-9c6f-e34a1f866517 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.314252] env[63379]: DEBUG oslo_vmware.api [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1369.314252] env[63379]: value = "task-1778881" [ 1369.314252] env[63379]: _type = "Task" [ 1369.314252] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1369.323581] env[63379]: DEBUG oslo_vmware.api [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1778881, 'name': MoveDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.369506] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Releasing lock "refresh_cache-efc5b3b6-bed4-484c-8a0c-65810747382d" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1369.369943] env[63379]: DEBUG nova.compute.manager [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Instance network_info: |[{"id": "120116f2-2c59-4c67-b5ec-2aad96939540", "address": "fa:16:3e:5b:1b:d6", "network": {"id": "c54d2090-56c1-4e69-85b5-a46ac3777f04", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-607216821-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f2e46a9dbd64c68a27219215a0c0b6a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4020f51-6e46-4b73-a79e-9fe3fd51b917", "external-id": "nsx-vlan-transportzone-16", "segmentation_id": 16, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap120116f2-2c", "ovs_interfaceid": "120116f2-2c59-4c67-b5ec-2aad96939540", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1369.370715] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5b:1b:d6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd4020f51-6e46-4b73-a79e-9fe3fd51b917', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '120116f2-2c59-4c67-b5ec-2aad96939540', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1369.380213] env[63379]: DEBUG oslo.service.loopingcall [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1369.380527] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1369.381292] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-497f5e25-15d1-4058-802c-518541855247 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.410539] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1369.410539] env[63379]: value = "task-1778882" [ 1369.410539] env[63379]: _type = "Task" [ 1369.410539] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1369.420415] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778882, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.522807] env[63379]: DEBUG nova.scheduler.client.report [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1369.763809] env[63379]: DEBUG nova.compute.manager [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1369.830038] env[63379]: DEBUG nova.virt.hardware [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1369.830038] env[63379]: DEBUG nova.virt.hardware [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1369.830038] env[63379]: DEBUG nova.virt.hardware [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1369.830493] env[63379]: DEBUG nova.virt.hardware [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1369.830493] env[63379]: DEBUG nova.virt.hardware [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1369.830493] env[63379]: DEBUG nova.virt.hardware [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1369.830493] env[63379]: DEBUG nova.virt.hardware [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1369.830493] env[63379]: DEBUG nova.virt.hardware [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1369.830684] env[63379]: DEBUG nova.virt.hardware [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1369.830760] env[63379]: DEBUG nova.virt.hardware [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1369.830937] env[63379]: DEBUG nova.virt.hardware [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1369.832570] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8137418-0df7-4fc3-ab0f-17f4a3dc257e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.838746] env[63379]: DEBUG oslo_vmware.api [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1778881, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.03443} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1369.839559] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] File moved {{(pid=63379) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 1369.839980] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Cleaning up location [datastore1] vmware_temp/76353f89-cc3e-4cf5-9b34-ec671eb00558 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1369.839980] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Deleting the datastore file [datastore1] vmware_temp/76353f89-cc3e-4cf5-9b34-ec671eb00558 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1369.840226] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3e5972cd-9c7d-4c3e-848f-bc55d7224c8c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.846399] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32ccb4d8-c464-4424-856c-1e9229aea804 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.851866] env[63379]: DEBUG oslo_vmware.api [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: 
(returnval){ [ 1369.851866] env[63379]: value = "task-1778884" [ 1369.851866] env[63379]: _type = "Task" [ 1369.851866] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1369.876042] env[63379]: DEBUG oslo_vmware.api [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1778884, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.908782] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Acquiring lock "d47be684-6cd8-45c6-8c6a-9a6db0390f97" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1369.909108] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Lock "d47be684-6cd8-45c6-8c6a-9a6db0390f97" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1369.931882] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778882, 'name': CreateVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.986643] env[63379]: DEBUG nova.network.neutron [req-b405266f-d928-4d47-b2dd-af4ceecbc6a6 req-2d19c7b5-32c2-4c5a-8e1f-1e0ed33781f3 service nova] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Updated VIF entry in instance network info cache for port 47acb26e-647c-4d9a-bcfd-7c9ea5cf9846. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1369.987019] env[63379]: DEBUG nova.network.neutron [req-b405266f-d928-4d47-b2dd-af4ceecbc6a6 req-2d19c7b5-32c2-4c5a-8e1f-1e0ed33781f3 service nova] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Updating instance_info_cache with network_info: [{"id": "47acb26e-647c-4d9a-bcfd-7c9ea5cf9846", "address": "fa:16:3e:d8:43:69", "network": {"id": "55f3848c-4d4f-4c83-a3e6-bc7a6f7af3ce", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.53", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eb95d75934bc4912a35f709406a98a65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47acb26e-64", "ovs_interfaceid": "47acb26e-647c-4d9a-bcfd-7c9ea5cf9846", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1370.027803] env[63379]: DEBUG oslo_concurrency.lockutils [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.284s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1370.028329] env[63379]: DEBUG nova.compute.manager [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1370.031439] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.132s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1370.033296] env[63379]: INFO nova.compute.claims [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1370.286455] env[63379]: DEBUG nova.compute.manager [req-ae69781e-1d0a-4852-b000-26ec6d8b9745 req-71ba4ebb-10f2-476a-a8a4-cb3511299fbd service nova] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] Received event network-changed-01b61007-ef4f-4f75-871c-33a30b49ecf4 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1370.286455] env[63379]: DEBUG nova.compute.manager [req-ae69781e-1d0a-4852-b000-26ec6d8b9745 req-71ba4ebb-10f2-476a-a8a4-cb3511299fbd service nova] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] Refreshing instance network info cache due to event network-changed-01b61007-ef4f-4f75-871c-33a30b49ecf4. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1370.286682] env[63379]: DEBUG oslo_concurrency.lockutils [req-ae69781e-1d0a-4852-b000-26ec6d8b9745 req-71ba4ebb-10f2-476a-a8a4-cb3511299fbd service nova] Acquiring lock "refresh_cache-c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1370.286822] env[63379]: DEBUG oslo_concurrency.lockutils [req-ae69781e-1d0a-4852-b000-26ec6d8b9745 req-71ba4ebb-10f2-476a-a8a4-cb3511299fbd service nova] Acquired lock "refresh_cache-c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1370.286997] env[63379]: DEBUG nova.network.neutron [req-ae69781e-1d0a-4852-b000-26ec6d8b9745 req-71ba4ebb-10f2-476a-a8a4-cb3511299fbd service nova] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] Refreshing network info cache for port 01b61007-ef4f-4f75-871c-33a30b49ecf4 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1370.302969] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Acquiring lock "5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1370.303146] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Lock "5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1370.363418] env[63379]: DEBUG oslo_vmware.api [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1778884, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.032195} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.363418] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1370.363958] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bddc2f6b-1d01-4111-af71-15b7cbc15b9d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.371501] env[63379]: DEBUG oslo_vmware.api [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1370.371501] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]527efa88-1b9a-a049-796e-ea915d4a8299" [ 1370.371501] env[63379]: _type = "Task" [ 1370.371501] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.382096] env[63379]: DEBUG oslo_vmware.api [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]527efa88-1b9a-a049-796e-ea915d4a8299, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.428950] env[63379]: DEBUG nova.compute.manager [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1370.433062] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778882, 'name': CreateVM_Task, 'duration_secs': 0.538328} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.433844] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1370.434722] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1370.434835] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1370.435190] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1370.438075] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9fd174c-5ffe-4676-a853-9889041bc6b4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.444822] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Waiting for the task: (returnval){ [ 1370.444822] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]528f8c3a-aeda-7b21-972f-a5551c45115d" [ 1370.444822] env[63379]: _type = "Task" [ 1370.444822] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.457656] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]528f8c3a-aeda-7b21-972f-a5551c45115d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.490503] env[63379]: DEBUG oslo_concurrency.lockutils [req-b405266f-d928-4d47-b2dd-af4ceecbc6a6 req-2d19c7b5-32c2-4c5a-8e1f-1e0ed33781f3 service nova] Releasing lock "refresh_cache-724c7a22-1833-4dc5-ab38-a11498a83ab8" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1370.540022] env[63379]: DEBUG nova.compute.utils [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1370.545212] env[63379]: DEBUG nova.compute.manager [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Not allocating networking since 'none' was specified. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1370.889684] env[63379]: DEBUG oslo_vmware.api [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]527efa88-1b9a-a049-796e-ea915d4a8299, 'name': SearchDatastore_Task, 'duration_secs': 0.01257} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.889992] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1370.890251] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 0aab61e4-c055-4872-973a-20fa6802ec10/0aab61e4-c055-4872-973a-20fa6802ec10.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1370.890715] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1370.890834] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1370.891149] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-87310372-2d3c-44f1-b147-e4ed9cd2c1f8 {{(pid=63379) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.894767] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b440b9bf-7868-4ebb-b66b-9c01a35e89ab {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.904042] env[63379]: DEBUG oslo_vmware.api [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1370.904042] env[63379]: value = "task-1778885" [ 1370.904042] env[63379]: _type = "Task" [ 1370.904042] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.905536] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1370.905678] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1370.910287] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aca6ed31-2681-4a7d-b6e4-bb6c9a3df507 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.922100] env[63379]: DEBUG oslo_vmware.api [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1778885, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.924242] env[63379]: DEBUG oslo_vmware.api [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Waiting for the task: (returnval){ [ 1370.924242] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a9edc6-3c0a-d7db-9bc4-5a94e17a00ce" [ 1370.924242] env[63379]: _type = "Task" [ 1370.924242] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.936806] env[63379]: DEBUG oslo_vmware.api [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a9edc6-3c0a-d7db-9bc4-5a94e17a00ce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.958183] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]528f8c3a-aeda-7b21-972f-a5551c45115d, 'name': SearchDatastore_Task, 'duration_secs': 0.017676} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.958502] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1370.958741] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1370.959144] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1370.969126] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1371.042494] env[63379]: DEBUG nova.network.neutron [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Successfully updated port: fc0b60ef-5c6d-4d2a-9318-02840aeb7595 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1371.045435] env[63379]: DEBUG nova.compute.manager [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Start building block device mappings for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1371.257271] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e836256e-2d22-40f0-a712-785b0aa7d272 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.267738] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29146116-619d-4051-aab6-c72656313f75 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.305418] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-328cdcb4-1345-4fcd-bf45-d22dd7ce4cdd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.317544] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fb9e7fc-ed93-4ba8-a745-f04642fb847f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.335049] env[63379]: DEBUG nova.compute.provider_tree [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1371.417865] env[63379]: DEBUG oslo_vmware.api [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1778885, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.438280] env[63379]: DEBUG oslo_vmware.api [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a9edc6-3c0a-d7db-9bc4-5a94e17a00ce, 'name': SearchDatastore_Task, 'duration_secs': 0.029909} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1371.438519] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b67c5ec-e599-49c0-b0ed-47a5f2688808 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.444819] env[63379]: DEBUG oslo_vmware.api [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Waiting for the task: (returnval){ [ 1371.444819] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52805ccf-6a2b-75da-b0b9-7a8f0168ab16" [ 1371.444819] env[63379]: _type = "Task" [ 1371.444819] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1371.453602] env[63379]: DEBUG oslo_vmware.api [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52805ccf-6a2b-75da-b0b9-7a8f0168ab16, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.553813] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Acquiring lock "refresh_cache-0edadcca-042e-440b-985b-6338e20265fa" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1371.554120] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Acquired lock "refresh_cache-0edadcca-042e-440b-985b-6338e20265fa" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1371.554201] env[63379]: DEBUG nova.network.neutron [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1371.597288] env[63379]: DEBUG nova.network.neutron [req-ae69781e-1d0a-4852-b000-26ec6d8b9745 req-71ba4ebb-10f2-476a-a8a4-cb3511299fbd service nova] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] Updated VIF entry in instance network info cache for port 01b61007-ef4f-4f75-871c-33a30b49ecf4. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1371.599104] env[63379]: DEBUG nova.network.neutron [req-ae69781e-1d0a-4852-b000-26ec6d8b9745 req-71ba4ebb-10f2-476a-a8a4-cb3511299fbd service nova] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] Updating instance_info_cache with network_info: [{"id": "01b61007-ef4f-4f75-871c-33a30b49ecf4", "address": "fa:16:3e:d3:b6:bf", "network": {"id": "c54d2090-56c1-4e69-85b5-a46ac3777f04", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-607216821-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f2e46a9dbd64c68a27219215a0c0b6a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4020f51-6e46-4b73-a79e-9fe3fd51b917", "external-id": "nsx-vlan-transportzone-16", "segmentation_id": 16, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01b61007-ef", "ovs_interfaceid": "01b61007-ef4f-4f75-871c-33a30b49ecf4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1371.840035] env[63379]: DEBUG nova.scheduler.client.report [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 
'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1371.916468] env[63379]: DEBUG oslo_vmware.api [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1778885, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.956934] env[63379]: DEBUG oslo_vmware.api [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52805ccf-6a2b-75da-b0b9-7a8f0168ab16, 'name': SearchDatastore_Task, 'duration_secs': 0.064112} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1371.957286] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1371.957487] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 724c7a22-1833-4dc5-ab38-a11498a83ab8/724c7a22-1833-4dc5-ab38-a11498a83ab8.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1371.957755] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1371.957936] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1371.958169] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-08c3678d-b4ee-4130-9bb9-b96065cae2a6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.960087] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c4d9f1ff-69bc-4e34-ba12-2ef92498fb32 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.967337] 
env[63379]: DEBUG oslo_vmware.api [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Waiting for the task: (returnval){ [ 1371.967337] env[63379]: value = "task-1778887" [ 1371.967337] env[63379]: _type = "Task" [ 1371.967337] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1371.975331] env[63379]: DEBUG oslo_vmware.api [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Task: {'id': task-1778887, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1372.002014] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1372.002222] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1372.002993] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4423b47b-93bc-47f6-8cfe-b5d66dd18b83 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.009462] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Waiting for the task: (returnval){ [ 1372.009462] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5220236f-3881-7123-e606-8ae8250c753b" [ 1372.009462] env[63379]: _type = "Task" [ 1372.009462] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1372.018517] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5220236f-3881-7123-e606-8ae8250c753b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1372.067523] env[63379]: DEBUG nova.compute.manager [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1372.102634] env[63379]: DEBUG nova.virt.hardware [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1372.103230] env[63379]: DEBUG nova.virt.hardware [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1372.103230] env[63379]: DEBUG nova.virt.hardware [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1372.104080] env[63379]: DEBUG nova.virt.hardware [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1372.104244] env[63379]: DEBUG nova.virt.hardware [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1372.104390] env[63379]: DEBUG nova.virt.hardware [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1372.104604] env[63379]: DEBUG nova.virt.hardware [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1372.104770] env[63379]: DEBUG nova.virt.hardware [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1372.104964] env[63379]: DEBUG nova.virt.hardware [None 
req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1372.105216] env[63379]: DEBUG nova.virt.hardware [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1372.105861] env[63379]: DEBUG nova.virt.hardware [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1372.110441] env[63379]: DEBUG oslo_concurrency.lockutils [req-ae69781e-1d0a-4852-b000-26ec6d8b9745 req-71ba4ebb-10f2-476a-a8a4-cb3511299fbd service nova] Releasing lock "refresh_cache-c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1372.110441] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6aafacb-e0bc-47b1-9ad9-9b8bdc02eba7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.114079] env[63379]: DEBUG nova.network.neutron [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1372.123449] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6a31491-c5ce-4478-a786-36e735690b35 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.145017] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Instance VIF info [] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1372.153512] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Creating folder: Project (ee0e45b288d2480d85eacee882c87a04). Parent ref: group-v369214. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1372.154972] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e95ff3ba-c3df-49b8-9f8c-7b8b266f9fe2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.170020] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Created folder: Project (ee0e45b288d2480d85eacee882c87a04) in parent group-v369214. 
[ 1372.170020] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Creating folder: Instances. Parent ref: group-v369229. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1372.170020] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3c481fbf-5c9b-4bab-9a56-76bc091af68e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.195773] env[63379]: DEBUG nova.compute.manager [req-a76bcefe-616c-4300-95dd-77c6966753b0 req-c904d863-2fcf-42fb-8c97-472a58f35fd0 service nova] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Received event network-vif-plugged-120116f2-2c59-4c67-b5ec-2aad96939540 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1372.195773] env[63379]: DEBUG oslo_concurrency.lockutils [req-a76bcefe-616c-4300-95dd-77c6966753b0 req-c904d863-2fcf-42fb-8c97-472a58f35fd0 service nova] Acquiring lock "efc5b3b6-bed4-484c-8a0c-65810747382d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1372.195957] env[63379]: DEBUG oslo_concurrency.lockutils [req-a76bcefe-616c-4300-95dd-77c6966753b0 req-c904d863-2fcf-42fb-8c97-472a58f35fd0 service nova] Lock "efc5b3b6-bed4-484c-8a0c-65810747382d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1372.196116] env[63379]: DEBUG oslo_concurrency.lockutils [req-a76bcefe-616c-4300-95dd-77c6966753b0 req-c904d863-2fcf-42fb-8c97-472a58f35fd0 service nova] Lock "efc5b3b6-bed4-484c-8a0c-65810747382d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1372.196305] env[63379]: DEBUG nova.compute.manager [req-a76bcefe-616c-4300-95dd-77c6966753b0 req-c904d863-2fcf-42fb-8c97-472a58f35fd0 service nova] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] No waiting events found dispatching network-vif-plugged-120116f2-2c59-4c67-b5ec-2aad96939540 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1372.196434] env[63379]: WARNING nova.compute.manager [req-a76bcefe-616c-4300-95dd-77c6966753b0 req-c904d863-2fcf-42fb-8c97-472a58f35fd0 service nova] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Received unexpected event network-vif-plugged-120116f2-2c59-4c67-b5ec-2aad96939540 for instance with vm_state building and task_state spawning. [ 1372.196592] env[63379]: DEBUG nova.compute.manager [req-a76bcefe-616c-4300-95dd-77c6966753b0 req-c904d863-2fcf-42fb-8c97-472a58f35fd0 service nova] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Received event network-changed-120116f2-2c59-4c67-b5ec-2aad96939540 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1372.197067] env[63379]: DEBUG nova.compute.manager [req-a76bcefe-616c-4300-95dd-77c6966753b0 req-c904d863-2fcf-42fb-8c97-472a58f35fd0 service nova] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Refreshing instance network info cache due to event network-changed-120116f2-2c59-4c67-b5ec-2aad96939540. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1372.197067] env[63379]: DEBUG oslo_concurrency.lockutils [req-a76bcefe-616c-4300-95dd-77c6966753b0 req-c904d863-2fcf-42fb-8c97-472a58f35fd0 service nova] Acquiring lock "refresh_cache-efc5b3b6-bed4-484c-8a0c-65810747382d" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1372.197067] env[63379]: DEBUG oslo_concurrency.lockutils [req-a76bcefe-616c-4300-95dd-77c6966753b0 req-c904d863-2fcf-42fb-8c97-472a58f35fd0 service nova] Acquired lock "refresh_cache-efc5b3b6-bed4-484c-8a0c-65810747382d" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1372.198073] env[63379]: DEBUG nova.network.neutron [req-a76bcefe-616c-4300-95dd-77c6966753b0 req-c904d863-2fcf-42fb-8c97-472a58f35fd0 service nova] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Refreshing network info cache for port 120116f2-2c59-4c67-b5ec-2aad96939540 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1372.200916] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Created folder: Instances in parent group-v369229. [ 1372.201164] env[63379]: DEBUG oslo.service.loopingcall [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1372.201563] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1372.201785] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-88cfb9d1-6507-450b-aa15-57bb96c181cb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.229697] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1372.229697] env[63379]: value = "task-1778890" [ 1372.229697] env[63379]: _type = "Task" [ 1372.229697] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1372.246203] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778890, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1372.346453] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.313s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1372.346453] env[63379]: DEBUG nova.compute.manager [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1372.350509] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.504s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1372.352803] env[63379]: INFO nova.compute.claims [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1372.418210] env[63379]: DEBUG oslo_vmware.api [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1778885, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1372.477507] env[63379]: DEBUG oslo_vmware.api [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Task: {'id': task-1778887, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1372.522943] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5220236f-3881-7123-e606-8ae8250c753b, 'name': SearchDatastore_Task, 'duration_secs': 0.461565} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1372.525905] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13542b2b-65e1-4428-ab6e-9250062522e4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.536743] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Waiting for the task: (returnval){ [ 1372.536743] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d9ec4c-5b7b-94bd-ee0b-08ce554564bb" [ 1372.536743] env[63379]: _type = "Task" [ 1372.536743] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1372.547585] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d9ec4c-5b7b-94bd-ee0b-08ce554564bb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1372.729262] env[63379]: DEBUG nova.network.neutron [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Updating instance_info_cache with network_info: [{"id": "fc0b60ef-5c6d-4d2a-9318-02840aeb7595", "address": "fa:16:3e:cb:11:81", "network": {"id": "c54d2090-56c1-4e69-85b5-a46ac3777f04", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-607216821-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f2e46a9dbd64c68a27219215a0c0b6a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4020f51-6e46-4b73-a79e-9fe3fd51b917", "external-id": "nsx-vlan-transportzone-16", "segmentation_id": 16, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc0b60ef-5c", "ovs_interfaceid": "fc0b60ef-5c6d-4d2a-9318-02840aeb7595", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1372.743413] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778890, 'name': CreateVM_Task} progress is 15%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1372.813661] env[63379]: DEBUG oslo_concurrency.lockutils [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Acquiring lock "ae565930-1bbc-4e75-bfc1-25dbcfd2e999" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1372.813661] env[63379]: DEBUG oslo_concurrency.lockutils [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Lock "ae565930-1bbc-4e75-bfc1-25dbcfd2e999" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1372.859996] env[63379]: DEBUG nova.compute.utils [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1372.862210] env[63379]: DEBUG nova.compute.manager [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1372.864019] env[63379]: DEBUG nova.network.neutron [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1372.919454] env[63379]: DEBUG oslo_vmware.api [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1778885, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1372.978997] env[63379]: DEBUG oslo_vmware.api [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Task: {'id': task-1778887, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.046940] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d9ec4c-5b7b-94bd-ee0b-08ce554564bb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.052456] env[63379]: DEBUG nova.policy [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '76ef14f3e15341408d2ac3bb139ff93d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '862e52cb5e924bbebb353a9ced8f5e80', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1373.233574] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Releasing lock "refresh_cache-0edadcca-042e-440b-985b-6338e20265fa" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1373.233574] env[63379]: DEBUG nova.compute.manager [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Instance network_info: |[{"id": "fc0b60ef-5c6d-4d2a-9318-02840aeb7595", "address": "fa:16:3e:cb:11:81", "network": {"id": "c54d2090-56c1-4e69-85b5-a46ac3777f04", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-607216821-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": 
{"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f2e46a9dbd64c68a27219215a0c0b6a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4020f51-6e46-4b73-a79e-9fe3fd51b917", "external-id": "nsx-vlan-transportzone-16", "segmentation_id": 16, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc0b60ef-5c", "ovs_interfaceid": "fc0b60ef-5c6d-4d2a-9318-02840aeb7595", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1373.242022] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cb:11:81', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd4020f51-6e46-4b73-a79e-9fe3fd51b917', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fc0b60ef-5c6d-4d2a-9318-02840aeb7595', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1373.255221] env[63379]: DEBUG oslo.service.loopingcall [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1373.255326] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1373.256521] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e2482f5d-7164-40cf-bbdf-93dddf3b4dad {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.281034] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778890, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.287988] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1373.287988] env[63379]: value = "task-1778891" [ 1373.287988] env[63379]: _type = "Task" [ 1373.287988] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1373.300223] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778891, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.362604] env[63379]: DEBUG nova.compute.manager [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Start building block device mappings for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1373.432312] env[63379]: DEBUG oslo_vmware.api [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1778885, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.475324] env[63379]: DEBUG nova.network.neutron [req-a76bcefe-616c-4300-95dd-77c6966753b0 req-c904d863-2fcf-42fb-8c97-472a58f35fd0 service nova] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Updated VIF entry in instance network info cache for port 120116f2-2c59-4c67-b5ec-2aad96939540. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1373.475453] env[63379]: DEBUG nova.network.neutron [req-a76bcefe-616c-4300-95dd-77c6966753b0 req-c904d863-2fcf-42fb-8c97-472a58f35fd0 service nova] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Updating instance_info_cache with network_info: [{"id": "120116f2-2c59-4c67-b5ec-2aad96939540", "address": "fa:16:3e:5b:1b:d6", "network": {"id": "c54d2090-56c1-4e69-85b5-a46ac3777f04", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-607216821-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f2e46a9dbd64c68a27219215a0c0b6a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4020f51-6e46-4b73-a79e-9fe3fd51b917", "external-id": "nsx-vlan-transportzone-16", "segmentation_id": 16, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap120116f2-2c", "ovs_interfaceid": "120116f2-2c59-4c67-b5ec-2aad96939540", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1373.492172] env[63379]: DEBUG oslo_vmware.api [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Task: {'id': task-1778887, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.547419] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d9ec4c-5b7b-94bd-ee0b-08ce554564bb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.623027] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dcc10f2-fb8d-4708-9789-9e5d0c728364 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.631239] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6eb73e3-92ee-490e-a5d0-580a02b69e7e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.668560] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a28b5e0-f50d-4d45-9131-c21987f42017 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.677130] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eef42659-0d09-48b0-ae14-4e60251b472d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.694999] env[63379]: DEBUG nova.compute.provider_tree [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1373.746811] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778890, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.803898] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778891, 'name': CreateVM_Task} progress is 15%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.925411] env[63379]: DEBUG oslo_vmware.api [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1778885, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.982956] env[63379]: DEBUG oslo_concurrency.lockutils [req-a76bcefe-616c-4300-95dd-77c6966753b0 req-c904d863-2fcf-42fb-8c97-472a58f35fd0 service nova] Releasing lock "refresh_cache-efc5b3b6-bed4-484c-8a0c-65810747382d" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1373.988634] env[63379]: DEBUG oslo_vmware.api [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Task: {'id': task-1778887, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.048836] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d9ec4c-5b7b-94bd-ee0b-08ce554564bb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.198696] env[63379]: DEBUG nova.scheduler.client.report [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1374.243799] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778890, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.300657] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778891, 'name': CreateVM_Task} progress is 15%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.340112] env[63379]: DEBUG nova.network.neutron [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Successfully created port: a3f7ad68-1a71-4217-91b5-0d8a762a15c5 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1374.372193] env[63379]: DEBUG nova.compute.manager [req-2a1789f6-bd70-4308-95a5-ec641d6266ce req-8423f64d-667f-4407-a41e-97ab27d84d61 service nova] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Received event network-vif-plugged-fc0b60ef-5c6d-4d2a-9318-02840aeb7595 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1374.372305] env[63379]: DEBUG oslo_concurrency.lockutils [req-2a1789f6-bd70-4308-95a5-ec641d6266ce req-8423f64d-667f-4407-a41e-97ab27d84d61 service nova] Acquiring lock "0edadcca-042e-440b-985b-6338e20265fa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1374.372763] env[63379]: DEBUG oslo_concurrency.lockutils [req-2a1789f6-bd70-4308-95a5-ec641d6266ce req-8423f64d-667f-4407-a41e-97ab27d84d61 service nova] Lock "0edadcca-042e-440b-985b-6338e20265fa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1374.372763] env[63379]: DEBUG oslo_concurrency.lockutils [req-2a1789f6-bd70-4308-95a5-ec641d6266ce req-8423f64d-667f-4407-a41e-97ab27d84d61 service nova] Lock "0edadcca-042e-440b-985b-6338e20265fa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1374.372936] env[63379]: DEBUG nova.compute.manager [req-2a1789f6-bd70-4308-95a5-ec641d6266ce req-8423f64d-667f-4407-a41e-97ab27d84d61 service nova] [instance: 0edadcca-042e-440b-985b-6338e20265fa] No waiting events found dispatching 
network-vif-plugged-fc0b60ef-5c6d-4d2a-9318-02840aeb7595 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1374.373114] env[63379]: WARNING nova.compute.manager [req-2a1789f6-bd70-4308-95a5-ec641d6266ce req-8423f64d-667f-4407-a41e-97ab27d84d61 service nova] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Received unexpected event network-vif-plugged-fc0b60ef-5c6d-4d2a-9318-02840aeb7595 for instance with vm_state building and task_state spawning. [ 1374.373295] env[63379]: DEBUG nova.compute.manager [req-2a1789f6-bd70-4308-95a5-ec641d6266ce req-8423f64d-667f-4407-a41e-97ab27d84d61 service nova] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Received event network-changed-fc0b60ef-5c6d-4d2a-9318-02840aeb7595 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1374.373467] env[63379]: DEBUG nova.compute.manager [req-2a1789f6-bd70-4308-95a5-ec641d6266ce req-8423f64d-667f-4407-a41e-97ab27d84d61 service nova] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Refreshing instance network info cache due to event network-changed-fc0b60ef-5c6d-4d2a-9318-02840aeb7595. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1374.373644] env[63379]: DEBUG oslo_concurrency.lockutils [req-2a1789f6-bd70-4308-95a5-ec641d6266ce req-8423f64d-667f-4407-a41e-97ab27d84d61 service nova] Acquiring lock "refresh_cache-0edadcca-042e-440b-985b-6338e20265fa" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1374.373901] env[63379]: DEBUG oslo_concurrency.lockutils [req-2a1789f6-bd70-4308-95a5-ec641d6266ce req-8423f64d-667f-4407-a41e-97ab27d84d61 service nova] Acquired lock "refresh_cache-0edadcca-042e-440b-985b-6338e20265fa" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1374.374081] env[63379]: DEBUG nova.network.neutron [req-2a1789f6-bd70-4308-95a5-ec641d6266ce req-8423f64d-667f-4407-a41e-97ab27d84d61 service nova] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Refreshing network info cache for port fc0b60ef-5c6d-4d2a-9318-02840aeb7595 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1374.378378] env[63379]: DEBUG nova.compute.manager [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1374.443020] env[63379]: DEBUG oslo_vmware.api [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1778885, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.474278] env[63379]: DEBUG nova.virt.hardware [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1374.474278] env[63379]: DEBUG nova.virt.hardware [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1374.474278] env[63379]: DEBUG nova.virt.hardware [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1374.474533] env[63379]: DEBUG nova.virt.hardware [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1374.474533] env[63379]: DEBUG nova.virt.hardware [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1374.478895] env[63379]: DEBUG nova.virt.hardware [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1374.478895] env[63379]: DEBUG nova.virt.hardware [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1374.479765] env[63379]: DEBUG nova.virt.hardware [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1374.479981] env[63379]: DEBUG nova.virt.hardware [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1374.480180] env[63379]: DEBUG nova.virt.hardware [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1374.482495] env[63379]: DEBUG nova.virt.hardware [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1374.482495] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66e0da92-e2e3-4ca1-b87e-3c8298e42872 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.505358] env[63379]: DEBUG oslo_vmware.api [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Task: {'id': task-1778887, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.513441] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d3decf8-9732-46c3-b49d-4ef86d285a4d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.551691] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d9ec4c-5b7b-94bd-ee0b-08ce554564bb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.707847] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.357s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1374.708427] env[63379]: DEBUG nova.compute.manager [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1374.711967] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.531s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1374.716547] env[63379]: INFO nova.compute.claims [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1374.748647] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778890, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.809046] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778891, 'name': CreateVM_Task} progress is 15%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.927911] env[63379]: DEBUG oslo_vmware.api [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1778885, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.996674] env[63379]: DEBUG oslo_vmware.api [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Task: {'id': task-1778887, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.052736] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d9ec4c-5b7b-94bd-ee0b-08ce554564bb, 'name': SearchDatastore_Task, 'duration_secs': 2.49547} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.052805] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1375.053061] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c/c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1375.053350] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1375.053536] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1375.053764] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f735954d-5dfe-4dbd-9727-b5aefd95f50a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.056169] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c5fea2f3-eb4e-463d-8477-43c25eb11ecd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.065702] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Waiting for the task: (returnval){ [ 1375.065702] env[63379]: value = "task-1778893" [ 1375.065702] env[63379]: _type = "Task" [ 1375.065702] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.070022] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1375.070213] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1375.070972] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d810fef2-479b-4ac9-a1bf-f573bb788cb1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.079565] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1778893, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.084635] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Waiting for the task: (returnval){ [ 1375.084635] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52884b84-844a-76c6-12bb-9728ff4f4600" [ 1375.084635] env[63379]: _type = "Task" [ 1375.084635] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.098640] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52884b84-844a-76c6-12bb-9728ff4f4600, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.214304] env[63379]: DEBUG nova.compute.utils [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1375.219364] env[63379]: DEBUG nova.compute.manager [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1375.219364] env[63379]: DEBUG nova.network.neutron [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1375.250371] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778890, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.306026] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778891, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.432395] env[63379]: DEBUG oslo_vmware.api [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1778885, 'name': CopyVirtualDisk_Task, 'duration_secs': 4.316846} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.433174] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 0aab61e4-c055-4872-973a-20fa6802ec10/0aab61e4-c055-4872-973a-20fa6802ec10.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1375.433446] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1375.433779] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1415a0d5-1742-4ecf-bded-e4c272a497fd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.448046] env[63379]: DEBUG nova.policy [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8784c329b3794ea1ba4cd2fbc8a2c155', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a2519cafe6c84b12b560995b2d3dd84d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1375.451691] env[63379]: DEBUG oslo_vmware.api [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1375.451691] env[63379]: value = "task-1778894" [ 1375.451691] env[63379]: _type = "Task" [ 1375.451691] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.464627] env[63379]: DEBUG oslo_vmware.api [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1778894, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.492830] env[63379]: DEBUG oslo_vmware.api [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Task: {'id': task-1778887, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.564213] env[63379]: DEBUG nova.network.neutron [req-2a1789f6-bd70-4308-95a5-ec641d6266ce req-8423f64d-667f-4407-a41e-97ab27d84d61 service nova] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Updated VIF entry in instance network info cache for port fc0b60ef-5c6d-4d2a-9318-02840aeb7595. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1375.564213] env[63379]: DEBUG nova.network.neutron [req-2a1789f6-bd70-4308-95a5-ec641d6266ce req-8423f64d-667f-4407-a41e-97ab27d84d61 service nova] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Updating instance_info_cache with network_info: [{"id": "fc0b60ef-5c6d-4d2a-9318-02840aeb7595", "address": "fa:16:3e:cb:11:81", "network": {"id": "c54d2090-56c1-4e69-85b5-a46ac3777f04", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-607216821-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f2e46a9dbd64c68a27219215a0c0b6a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4020f51-6e46-4b73-a79e-9fe3fd51b917", "external-id": "nsx-vlan-transportzone-16", "segmentation_id": 16, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc0b60ef-5c", "ovs_interfaceid": "fc0b60ef-5c6d-4d2a-9318-02840aeb7595", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1375.593448] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1778893, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.609650] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52884b84-844a-76c6-12bb-9728ff4f4600, 'name': SearchDatastore_Task, 'duration_secs': 0.055884} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.609650] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f175219b-4d3d-4ebd-b145-1ba16e32a689 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.618181] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Waiting for the task: (returnval){ [ 1375.618181] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c4dbc0-b5a5-db8a-bbc2-e05a0d61c0c1" [ 1375.618181] env[63379]: _type = "Task" [ 1375.618181] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.631906] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c4dbc0-b5a5-db8a-bbc2-e05a0d61c0c1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.721268] env[63379]: DEBUG nova.compute.manager [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1375.752607] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778890, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.814701] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778891, 'name': CreateVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.964605] env[63379]: DEBUG oslo_vmware.api [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1778894, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.131788} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.965378] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1375.965933] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d36d217-4951-4e30-ab37-9f505ddde0b9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.009803] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Reconfiguring VM instance instance-00000001 to attach disk [datastore1] 0aab61e4-c055-4872-973a-20fa6802ec10/0aab61e4-c055-4872-973a-20fa6802ec10.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1376.019037] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e19e3949-3ea8-4bca-8dc3-1a244eff36e9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.040386] env[63379]: DEBUG oslo_vmware.api [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Task: {'id': task-1778887, 'name': CopyVirtualDisk_Task, 'duration_secs': 3.871754} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.042033] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 724c7a22-1833-4dc5-ab38-a11498a83ab8/724c7a22-1833-4dc5-ab38-a11498a83ab8.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1376.042503] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1376.042791] env[63379]: DEBUG oslo_vmware.api [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1376.042791] env[63379]: value = "task-1778896" [ 1376.042791] env[63379]: _type = "Task" [ 1376.042791] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.047162] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bca7b2ac-6691-403a-a978-d9d25a720321 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.061602] env[63379]: DEBUG oslo_vmware.api [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1778896, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.064518] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f00de692-6abf-493a-aef6-8c80dc737b3b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.067535] env[63379]: DEBUG oslo_vmware.api [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Waiting for the task: (returnval){ [ 1376.067535] env[63379]: value = "task-1778897" [ 1376.067535] env[63379]: _type = "Task" [ 1376.067535] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.075278] env[63379]: DEBUG oslo_concurrency.lockutils [req-2a1789f6-bd70-4308-95a5-ec641d6266ce req-8423f64d-667f-4407-a41e-97ab27d84d61 service nova] Releasing lock "refresh_cache-0edadcca-042e-440b-985b-6338e20265fa" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1376.080237] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c507c48-4543-4b96-b239-d36752ee8b81 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.090177] env[63379]: DEBUG oslo_vmware.api [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Task: {'id': task-1778897, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.101873] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1778893, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.884983} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.104339] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c/c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1376.104339] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1376.104339] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-433d7e69-999c-4224-9d58-611a10930213 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.144117] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1d0ae70-fd61-43a8-81c7-8363c089d8ee {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.147767] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Waiting for the task: (returnval){ [ 1376.147767] env[63379]: value = "task-1778898" [ 1376.147767] env[63379]: _type = "Task" [ 1376.147767] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.157051] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c4dbc0-b5a5-db8a-bbc2-e05a0d61c0c1, 'name': SearchDatastore_Task, 'duration_secs': 0.093061} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.158655] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f094a1b-9a9e-4c1a-bcc6-d283b44fc3fb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.163096] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1376.163096] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] efc5b3b6-bed4-484c-8a0c-65810747382d/efc5b3b6-bed4-484c-8a0c-65810747382d.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1376.166700] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-40a38244-1cc5-4c6b-a8fa-21ccc6060d37 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.168651] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1778898, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.199387] env[63379]: DEBUG nova.compute.provider_tree [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1376.202288] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Waiting for the task: (returnval){ [ 1376.202288] env[63379]: value = "task-1778899" [ 1376.202288] env[63379]: _type = "Task" [ 1376.202288] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.214023] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1778899, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.249903] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778890, 'name': CreateVM_Task, 'duration_secs': 3.801832} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.251231] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1376.253906] env[63379]: DEBUG oslo_concurrency.lockutils [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1376.253906] env[63379]: DEBUG oslo_concurrency.lockutils [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1376.253906] env[63379]: DEBUG oslo_concurrency.lockutils [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1376.253906] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b9eab621-c229-4d9e-b745-fdedadbff19c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.261192] env[63379]: DEBUG oslo_vmware.api [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Waiting for the task: (returnval){ [ 1376.261192] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c7a674-7f67-98a8-6e18-732f437c9df6" [ 1376.261192] env[63379]: _type = "Task" [ 1376.261192] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.270715] env[63379]: DEBUG oslo_vmware.api [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c7a674-7f67-98a8-6e18-732f437c9df6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.310972] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778891, 'name': CreateVM_Task, 'duration_secs': 2.624169} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.310972] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1376.310972] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1376.568288] env[63379]: DEBUG oslo_vmware.api [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1778896, 'name': ReconfigVM_Task, 'duration_secs': 0.365394} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.568640] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Reconfigured VM instance instance-00000001 to attach disk [datastore1] 0aab61e4-c055-4872-973a-20fa6802ec10/0aab61e4-c055-4872-973a-20fa6802ec10.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1376.576037] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3f709354-e1e2-4392-8f5b-d0eb60f65e00 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.584317] env[63379]: DEBUG oslo_vmware.api [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Task: {'id': task-1778897, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077109} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.589392] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1376.589985] env[63379]: DEBUG oslo_vmware.api [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1376.589985] env[63379]: value = "task-1778900" [ 1376.589985] env[63379]: _type = "Task" [ 1376.589985] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.591901] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e374d6c2-eacd-4988-b46f-e61d4a45a7a1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.627446] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Reconfiguring VM instance instance-00000002 to attach disk [datastore1] 724c7a22-1833-4dc5-ab38-a11498a83ab8/724c7a22-1833-4dc5-ab38-a11498a83ab8.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1376.627809] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b3d28f3c-bad7-4d0d-9f42-6c88f61cb8dc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.651178] env[63379]: DEBUG oslo_vmware.api [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Waiting for the task: (returnval){ [ 1376.651178] env[63379]: value = "task-1778901" [ 1376.651178] env[63379]: _type = "Task" [ 1376.651178] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.667625] env[63379]: DEBUG oslo_vmware.api [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Task: {'id': task-1778901, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.672092] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1778898, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072903} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.672092] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1376.672661] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5f0d974-693c-4ede-b474-c9d88c43c2a7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.705211] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] Reconfiguring VM instance instance-00000003 to attach disk [datastore1] c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c/c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1376.705578] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-919be38e-7347-44a2-95d6-3d5ae3d63015 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.721255] env[63379]: DEBUG nova.network.neutron [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Successfully created port: b26a8dba-cd30-4320-901e-8e9a8584ea6f {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1376.726776] env[63379]: DEBUG nova.scheduler.client.report [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1376.738751] env[63379]: DEBUG nova.compute.manager [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1376.741523] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1778899, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.516277} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.742864] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] efc5b3b6-bed4-484c-8a0c-65810747382d/efc5b3b6-bed4-484c-8a0c-65810747382d.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1376.743175] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1376.744161] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Waiting for the task: (returnval){ [ 1376.744161] env[63379]: value = "task-1778902" [ 1376.744161] env[63379]: _type = "Task" [ 1376.744161] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.744161] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d54cca0a-f9e9-4266-b3ad-135d1f1a4dca {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.757243] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1778902, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.758283] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Waiting for the task: (returnval){ [ 1376.758283] env[63379]: value = "task-1778903" [ 1376.758283] env[63379]: _type = "Task" [ 1376.758283] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.775015] env[63379]: DEBUG oslo_concurrency.lockutils [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Acquiring lock "bf0dd3cf-684c-4378-a89c-5b9f16df062d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1376.775211] env[63379]: DEBUG oslo_concurrency.lockutils [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Lock "bf0dd3cf-684c-4378-a89c-5b9f16df062d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1376.782266] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1778903, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.784755] env[63379]: DEBUG nova.virt.hardware [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1376.785154] env[63379]: DEBUG nova.virt.hardware [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1376.785215] env[63379]: DEBUG nova.virt.hardware [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1376.785400] env[63379]: DEBUG nova.virt.hardware [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1376.785711] env[63379]: DEBUG nova.virt.hardware [None req-ce66ae40-b889-4600-970e-01db61dd54bb 
tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1376.785711] env[63379]: DEBUG nova.virt.hardware [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1376.786576] env[63379]: DEBUG nova.virt.hardware [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1376.786576] env[63379]: DEBUG nova.virt.hardware [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1376.786576] env[63379]: DEBUG nova.virt.hardware [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1376.786576] env[63379]: DEBUG nova.virt.hardware [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1376.786782] env[63379]: DEBUG nova.virt.hardware [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1376.794479] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cb6c267-01f3-453c-b584-78f0745326a8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.797720] env[63379]: DEBUG oslo_vmware.api [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c7a674-7f67-98a8-6e18-732f437c9df6, 'name': SearchDatastore_Task, 'duration_secs': 0.012395} completed successfully. 
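[annotation] The nova.virt.hardware records above show the driver choosing a CPU topology for the 1-vCPU m1.nano flavor: with no flavor or image limits set, the defaults of 65536 sockets/cores/threads apply, and the only factorization of one vCPU is 1:1:1. Below is a minimal illustrative sketch of that enumeration; it is a simplification, not Nova's actual _get_possible_cpu_topologies, and the name possible_topologies is made up for this note.

    # Illustrative simplification of the topology search logged above.
    from collections import namedtuple

    Topology = namedtuple("Topology", "sockets cores threads")

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Enumerate sockets*cores*threads factorizations that cover all vCPUs."""
        found = []
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        found.append(Topology(s, c, t))
        return found

    print(possible_topologies(1))  # [Topology(sockets=1, cores=1, threads=1)], matching the log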
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.798102] env[63379]: DEBUG oslo_concurrency.lockutils [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1376.798356] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1376.798604] env[63379]: DEBUG oslo_concurrency.lockutils [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1376.798755] env[63379]: DEBUG oslo_concurrency.lockutils [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1376.798958] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1376.799746] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1376.800154] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1376.800404] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a3f1c40e-2fd8-42ff-beb3-19bb3e7ba692 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.806244] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21b1580e-0f97-4b25-a7b7-e834d2fc9061 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.810932] env[63379]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dfd4027-ced4-44de-a5a7-378b9cf66693 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.819545] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1376.820723] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1376.821126] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b29a8f92-74b5-444e-94f5-3b3bf2bc53fe {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.835783] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Waiting for the task: (returnval){ [ 1376.835783] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52cc2230-da5f-fabf-bfae-8940ed686f32" [ 1376.835783] env[63379]: _type = "Task" [ 1376.835783] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.842035] env[63379]: DEBUG oslo_vmware.api [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Waiting for the task: (returnval){ [ 1376.842035] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5266c3e1-042e-9e4d-ca9c-2c415f0d8ebd" [ 1376.842035] env[63379]: _type = "Task" [ 1376.842035] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.854659] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52cc2230-da5f-fabf-bfae-8940ed686f32, 'name': SearchDatastore_Task, 'duration_secs': 0.01251} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.854659] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1376.854659] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1376.854659] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1376.857046] env[63379]: DEBUG oslo_vmware.api [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5266c3e1-042e-9e4d-ca9c-2c415f0d8ebd, 'name': SearchDatastore_Task, 'duration_secs': 0.013675} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.857845] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e82cf215-2627-4f7b-9c09-f7e002e295b8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.864334] env[63379]: DEBUG oslo_vmware.api [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Waiting for the task: (returnval){ [ 1376.864334] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c96780-090d-b23b-4f33-28bca080bd8e" [ 1376.864334] env[63379]: _type = "Task" [ 1376.864334] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.876047] env[63379]: DEBUG oslo_vmware.api [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c96780-090d-b23b-4f33-28bca080bd8e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.109267] env[63379]: DEBUG oslo_vmware.api [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1778900, 'name': Rename_Task, 'duration_secs': 0.243818} completed successfully. 
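[annotation] The recurring "Waiting for the task: (returnval){...} to complete" and "progress is N%" lines come from oslo.vmware's task handling: a vSphere call that returns a Task object is followed by wait_for_task(), which polls TaskInfo until success or error. A minimal sketch of that pattern follows; search_image_cache and make_session are made-up wrappers, the host and credentials are placeholders, and ds_browser/search_spec stand in for managed-object references that would come from earlier property-collector calls.

    # Hedged sketch of the issue-task-then-poll pattern seen in this log.
    from oslo_vmware import api as vmware_api

    def make_session(host, user, password):
        # Connection settings would normally come from nova.conf's [vmware] section.
        return vmware_api.VMwareAPISession(host, user, password,
                                           api_retry_count=10, task_poll_interval=0.5)

    def search_image_cache(session, ds_browser, search_spec):
        # invoke_api() performs the SOAP request (SearchDatastore_Task here);
        # wait_for_task() polls the returned Task until it completes, emitting
        # the "_poll_task ... progress is N%" debug lines along the way.
        task = session.invoke_api(session.vim, 'SearchDatastore_Task', ds_browser,
                                  datastorePath='[datastore1] devstack-image-cache_base',
                                  searchSpec=search_spec)
        return session.wait_for_task(task)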
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.109267] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1377.109267] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-83f6839e-6816-44a1-90eb-17ee87a3d3f6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.118948] env[63379]: DEBUG oslo_vmware.api [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1377.118948] env[63379]: value = "task-1778904" [ 1377.118948] env[63379]: _type = "Task" [ 1377.118948] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.131344] env[63379]: DEBUG oslo_vmware.api [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1778904, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.173059] env[63379]: DEBUG oslo_vmware.api [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Task: {'id': task-1778901, 'name': ReconfigVM_Task, 'duration_secs': 0.37966} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.173357] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Reconfigured VM instance instance-00000002 to attach disk [datastore1] 724c7a22-1833-4dc5-ab38-a11498a83ab8/724c7a22-1833-4dc5-ab38-a11498a83ab8.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1377.174388] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ddd71726-16c6-4850-a641-1f2b6f8965c8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.192133] env[63379]: DEBUG oslo_vmware.api [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Waiting for the task: (returnval){ [ 1377.192133] env[63379]: value = "task-1778905" [ 1377.192133] env[63379]: _type = "Task" [ 1377.192133] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.206654] env[63379]: DEBUG oslo_vmware.api [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Task: {'id': task-1778905, 'name': Rename_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.234298] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.522s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1377.235223] env[63379]: DEBUG nova.compute.manager [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1377.237712] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.269s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1377.239422] env[63379]: INFO nova.compute.claims [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1377.261218] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1778902, 'name': ReconfigVM_Task, 'duration_secs': 0.327857} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.264894] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] Reconfigured VM instance instance-00000003 to attach disk [datastore1] c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c/c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1377.266353] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b2232fda-1027-48bf-8b42-cb233f3dcc50 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.280493] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1778903, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085634} completed successfully. 
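[annotation] The 'Lock "compute_resources" ... acquired/released' entries (and the per-image locks on the devstack-image-cache_base paths) are oslo.concurrency lockutils semaphores: the resource tracker serializes claims so only one build updates usage at a time, and lockutils logs how long each caller waited and held the lock. A minimal sketch of the primitive follows; claim_resources is a hypothetical function, not Nova's ResourceTracker.instance_claim, though the 'nova-' prefix and the "compute_resources" lock name mirror what the log shows.

    # Hedged sketch of the oslo.concurrency locking pattern seen in this log.
    from oslo_concurrency import lockutils

    synchronized = lockutils.synchronized_with_prefix('nova-')

    @synchronized('compute_resources')
    def claim_resources(instance_uuid):
        # Everything here runs under the "compute_resources" semaphore;
        # concurrent callers block, which is why the log shows waits of
        # several seconds (e.g. "waited 6.269s") while another claim runs.
        print('claiming resources for %s' % instance_uuid)

    # Equivalent inline form:
    with lockutils.lock('compute_resources', 'nova-'):
        pass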
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.280684] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1377.281075] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Waiting for the task: (returnval){ [ 1377.281075] env[63379]: value = "task-1778906" [ 1377.281075] env[63379]: _type = "Task" [ 1377.281075] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.281856] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-538c5568-63da-49da-9546-9b843061a313 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.298375] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1778906, 'name': Rename_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.319910] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Reconfiguring VM instance instance-00000004 to attach disk [datastore1] efc5b3b6-bed4-484c-8a0c-65810747382d/efc5b3b6-bed4-484c-8a0c-65810747382d.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1377.320819] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-27564887-8673-41cb-b78a-ae429311ed66 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.344424] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Waiting for the task: (returnval){ [ 1377.344424] env[63379]: value = "task-1778907" [ 1377.344424] env[63379]: _type = "Task" [ 1377.344424] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.357240] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1778907, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.376565] env[63379]: DEBUG oslo_vmware.api [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c96780-090d-b23b-4f33-28bca080bd8e, 'name': SearchDatastore_Task, 'duration_secs': 0.010967} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.376904] env[63379]: DEBUG oslo_concurrency.lockutils [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1377.377240] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] a6f7c217-a493-403d-b776-870df4575f2a/a6f7c217-a493-403d-b776-870df4575f2a.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1377.377646] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1377.377914] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1377.378204] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d5cc653d-cf68-41f0-b9b6-78664b15c119 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.380503] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-83e94f18-c90f-426d-8f58-3085c3874e08 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.390211] env[63379]: DEBUG oslo_vmware.api [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Waiting for the task: (returnval){ [ 1377.390211] env[63379]: value = "task-1778908" [ 1377.390211] env[63379]: _type = "Task" [ 1377.390211] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.391153] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1377.391366] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1377.395482] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b9cac3c3-129f-4b04-88b4-6baaaf6ec459 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.407145] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Waiting for the task: (returnval){ [ 1377.407145] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a0708a-c12f-ad74-9adb-6739726b1a9c" [ 1377.407145] env[63379]: _type = "Task" [ 1377.407145] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.407145] env[63379]: DEBUG oslo_vmware.api [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Task: {'id': task-1778908, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.418377] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a0708a-c12f-ad74-9adb-6739726b1a9c, 'name': SearchDatastore_Task, 'duration_secs': 0.012585} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.419484] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-acea45e8-3a19-4972-9ce9-3158831ef8b8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.426060] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Waiting for the task: (returnval){ [ 1377.426060] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5292427f-9286-cf34-bab7-2d29b5c89a53" [ 1377.426060] env[63379]: _type = "Task" [ 1377.426060] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.436784] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5292427f-9286-cf34-bab7-2d29b5c89a53, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.641404] env[63379]: DEBUG oslo_vmware.api [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1778904, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.710017] env[63379]: DEBUG oslo_vmware.api [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Task: {'id': task-1778905, 'name': Rename_Task, 'duration_secs': 0.192787} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.710214] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1377.710464] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-69ebf2cb-2a49-48ee-adb7-b789b0d164de {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.724366] env[63379]: DEBUG oslo_vmware.api [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Waiting for the task: (returnval){ [ 1377.724366] env[63379]: value = "task-1778909" [ 1377.724366] env[63379]: _type = "Task" [ 1377.724366] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.744902] env[63379]: DEBUG nova.compute.utils [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1377.750907] env[63379]: DEBUG oslo_vmware.api [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Task: {'id': task-1778909, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.751490] env[63379]: DEBUG nova.compute.manager [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1377.751671] env[63379]: DEBUG nova.network.neutron [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1377.807533] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1778906, 'name': Rename_Task, 'duration_secs': 0.170474} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.807768] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1377.808083] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c2417a1d-868a-4d14-af55-c61dbed30145 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.819394] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Waiting for the task: (returnval){ [ 1377.819394] env[63379]: value = "task-1778910" [ 1377.819394] env[63379]: _type = "Task" [ 1377.819394] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.838262] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1778910, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.859434] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1778907, 'name': ReconfigVM_Task, 'duration_secs': 0.339422} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.859747] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Reconfigured VM instance instance-00000004 to attach disk [datastore1] efc5b3b6-bed4-484c-8a0c-65810747382d/efc5b3b6-bed4-484c-8a0c-65810747382d.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1377.861068] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-522ff180-24f7-4f56-b0b0-761c0aa4ab92 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.871331] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Waiting for the task: (returnval){ [ 1377.871331] env[63379]: value = "task-1778911" [ 1377.871331] env[63379]: _type = "Task" [ 1377.871331] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.888631] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1778911, 'name': Rename_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.907117] env[63379]: DEBUG oslo_vmware.api [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Task: {'id': task-1778908, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.941328] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5292427f-9286-cf34-bab7-2d29b5c89a53, 'name': SearchDatastore_Task, 'duration_secs': 0.011358} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.941591] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1377.942089] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 0edadcca-042e-440b-985b-6338e20265fa/0edadcca-042e-440b-985b-6338e20265fa.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1377.942242] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6266316a-d37c-410a-a011-c7b6412f6471 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.951738] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Waiting for the task: (returnval){ [ 1377.951738] env[63379]: value = "task-1778912" [ 1377.951738] env[63379]: _type = "Task" [ 1377.951738] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.964214] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1778912, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.967812] env[63379]: DEBUG nova.network.neutron [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Successfully updated port: a3f7ad68-1a71-4217-91b5-0d8a762a15c5 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1378.088535] env[63379]: DEBUG nova.network.neutron [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Successfully created port: cb52a59c-c52f-446e-b305-8cbd08c646d1 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1378.134878] env[63379]: DEBUG oslo_vmware.api [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1778904, 'name': PowerOnVM_Task, 'duration_secs': 0.535122} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.135752] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1378.136461] env[63379]: INFO nova.compute.manager [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Took 18.47 seconds to spawn the instance on the hypervisor. [ 1378.136741] env[63379]: DEBUG nova.compute.manager [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1378.137614] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01c9edfe-22a2-472d-b004-2e156554abf9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.180451] env[63379]: DEBUG nova.policy [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5fa958cb524741079d651e388f00f3c4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '551ba9289da4445ea0bad784aee2e86d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1378.241346] env[63379]: DEBUG oslo_vmware.api [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Task: {'id': task-1778909, 'name': PowerOnVM_Task} progress is 1%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.252658] env[63379]: DEBUG nova.compute.manager [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1378.338824] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1778910, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.386275] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1778911, 'name': Rename_Task, 'duration_secs': 0.183176} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.388943] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1378.388943] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a55b989d-824d-4ac6-afe5-1c22b9a9715f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.397824] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Waiting for the task: (returnval){ [ 1378.397824] env[63379]: value = "task-1778913" [ 1378.397824] env[63379]: _type = "Task" [ 1378.397824] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.405854] env[63379]: DEBUG oslo_vmware.api [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Task: {'id': task-1778908, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.565723} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.406972] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] a6f7c217-a493-403d-b776-870df4575f2a/a6f7c217-a493-403d-b776-870df4575f2a.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1378.408352] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1378.408352] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6b8a4959-caf6-4af0-b612-6f4100e845b5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.415193] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1778913, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.425232] env[63379]: DEBUG oslo_vmware.api [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Waiting for the task: (returnval){ [ 1378.425232] env[63379]: value = "task-1778914" [ 1378.425232] env[63379]: _type = "Task" [ 1378.425232] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.441960] env[63379]: DEBUG oslo_vmware.api [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Task: {'id': task-1778914, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.465948] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1778912, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.473662] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Acquiring lock "refresh_cache-55fb6899-0321-4bf2-bf3f-2e87dd479433" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1378.473900] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Acquired lock "refresh_cache-55fb6899-0321-4bf2-bf3f-2e87dd479433" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1378.474083] env[63379]: DEBUG nova.network.neutron [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1378.576426] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eb1bd65-f94a-4181-b390-fdd954361134 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.589753] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-937ba197-c7a4-4dd5-a96b-9079c9f5119f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.634039] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee8c5ef5-8ac1-4623-9512-dcb7e0a4f156 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.645719] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a05b2a21-173b-4357-b6ee-bfc31f57db37 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.673430] env[63379]: DEBUG nova.compute.provider_tree [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1378.677544] env[63379]: INFO nova.compute.manager [None 
req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Took 23.30 seconds to build instance. [ 1378.741482] env[63379]: DEBUG oslo_vmware.api [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Task: {'id': task-1778909, 'name': PowerOnVM_Task} progress is 64%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.835204] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1778910, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.909255] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1778913, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.940224] env[63379]: DEBUG oslo_vmware.api [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Task: {'id': task-1778914, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.091046} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.940569] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1378.941432] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67435073-1196-464e-9d7e-d9b246c1d517 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.967723] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Reconfiguring VM instance instance-00000006 to attach disk [datastore1] a6f7c217-a493-403d-b776-870df4575f2a/a6f7c217-a493-403d-b776-870df4575f2a.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1378.968672] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e43cec11-6f09-4333-82f9-29146d7588e3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.993445] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1778912, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.822691} completed successfully. 
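[annotation] The disk work completing here is the cached-image spawn path seen throughout this trace: copy the cached VMDK to the instance directory (CopyVirtualDisk_Task), then grow it to the flavor's root disk (ExtendVirtualDisk_Task); the "Extending root virtual disk to 1048576" figure is in KB, i.e. the m1.nano flavor's 1 GiB root_gb. Below is a hedged sketch of driving those two calls through oslo.vmware; copy_and_extend is a made-up helper, and dc_ref/src_vmdk/dst_vmdk are assumed to come from earlier datastore and property-collector lookups.

    # Hedged sketch of the copy-then-extend disk sequence seen above.
    def copy_and_extend(session, dc_ref, src_vmdk, dst_vmdk, root_gb):
        disk_mgr = session.vim.service_content.virtualDiskManager
        copy_task = session.invoke_api(
            session.vim, 'CopyVirtualDisk_Task', disk_mgr,
            sourceName=src_vmdk, sourceDatacenter=dc_ref,
            destName=dst_vmdk, destDatacenter=dc_ref)
        session.wait_for_task(copy_task)
        # 1 GiB root disk -> 1048576 KB, matching the log line.
        extend_task = session.invoke_api(
            session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
            name=dst_vmdk, datacenter=dc_ref,
            newCapacityKb=root_gb * 1024 * 1024, eagerZero=False)
        session.wait_for_task(extend_task)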
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.995146] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 0edadcca-042e-440b-985b-6338e20265fa/0edadcca-042e-440b-985b-6338e20265fa.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1378.995379] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1378.995695] env[63379]: DEBUG oslo_vmware.api [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Waiting for the task: (returnval){ [ 1378.995695] env[63379]: value = "task-1778915" [ 1378.995695] env[63379]: _type = "Task" [ 1378.995695] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.996965] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4dcda1f7-a4d7-40b8-a1e8-5da7ca579677 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.008140] env[63379]: DEBUG oslo_vmware.api [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Task: {'id': task-1778915, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.015071] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Waiting for the task: (returnval){ [ 1379.015071] env[63379]: value = "task-1778916" [ 1379.015071] env[63379]: _type = "Task" [ 1379.015071] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.025577] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1778916, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.123141] env[63379]: DEBUG nova.network.neutron [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Instance cache missing network info. 
{{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1379.179660] env[63379]: DEBUG nova.scheduler.client.report [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1379.184775] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e00b5dc6-825a-45fd-bcdc-1ab99f28429d tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "0aab61e4-c055-4872-973a-20fa6802ec10" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.818s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1379.240989] env[63379]: DEBUG oslo_vmware.api [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Task: {'id': task-1778909, 'name': PowerOnVM_Task, 'duration_secs': 1.476696} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.241751] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1379.241939] env[63379]: INFO nova.compute.manager [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Took 17.26 seconds to spawn the instance on the hypervisor. 
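The entries above illustrate the driver's task-polling loop: every vCenter call that matters here (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, PowerOnVM_Task) comes back as a vSphere task object, and oslo.vmware's wait_for_task polls it, logging "progress is N%" until the "completed successfully" record appears. The following is a minimal sketch of that pattern, assuming the public oslo.vmware API; the host, credentials, and managed-object reference are placeholders and nothing here is taken from the nova driver itself.

# Hedged sketch of the polling behind the "_poll_task ... progress is N%" and
# "completed successfully" lines above. Host, credentials and the VM moref are
# placeholders; keyword names follow the public oslo.vmware API as an assumption.
from oslo_vmware import api
from oslo_vmware import vim_util

session = api.VMwareAPISession(
    host='vc.example.test',          # placeholder vCenter endpoint
    server_username='stack',         # placeholder credentials
    server_password='secret',
    api_retry_count=10,
    task_poll_interval=0.5)          # interval between the _poll_task DEBUG lines

vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')   # placeholder moref

# Asynchronous vSphere methods return a Task managed object...
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

# ...and wait_for_task() blocks on it, logging progress and raising if the
# task ends in an error state.
task_info = session.wait_for_task(task)
assert task_info.state == 'success'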
[ 1379.242097] env[63379]: DEBUG nova.compute.manager [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1379.242851] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bce9b8c3-985b-4ee3-9258-81afa6ad63fb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.257033] env[63379]: DEBUG nova.network.neutron [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Successfully created port: e034314c-72fb-4187-9c6b-1cd2e95aa97a {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1379.267914] env[63379]: DEBUG nova.compute.manager [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1379.303510] env[63379]: DEBUG nova.virt.hardware [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1379.303741] env[63379]: DEBUG nova.virt.hardware [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1379.304485] env[63379]: DEBUG nova.virt.hardware [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1379.304485] env[63379]: DEBUG nova.virt.hardware [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1379.304485] env[63379]: DEBUG nova.virt.hardware [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Image 
pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1379.304485] env[63379]: DEBUG nova.virt.hardware [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1379.304787] env[63379]: DEBUG nova.virt.hardware [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1379.304851] env[63379]: DEBUG nova.virt.hardware [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1379.304966] env[63379]: DEBUG nova.virt.hardware [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1379.307029] env[63379]: DEBUG nova.virt.hardware [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1379.307029] env[63379]: DEBUG nova.virt.hardware [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1379.307029] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2f95637-8cf9-4ea8-b1e1-b8919af69f15 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.316909] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af2459c1-eae2-4f4f-83e2-11c1057b92b0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.342536] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1778910, 'name': PowerOnVM_Task, 'duration_secs': 1.228399} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.342884] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1379.343704] env[63379]: INFO nova.compute.manager [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] Took 15.11 seconds to spawn the instance on the hypervisor. [ 1379.343704] env[63379]: DEBUG nova.compute.manager [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1379.344551] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fda40e54-b1aa-4608-81d2-736b16d442b0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.380706] env[63379]: DEBUG nova.network.neutron [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Successfully created port: c856b8f8-3490-43b2-b2c2-b96a5c3e550e {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1379.420724] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1778913, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.420724] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Acquiring lock "25090d85-cd10-44fc-aa9d-071ada14f249" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1379.420957] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Lock "25090d85-cd10-44fc-aa9d-071ada14f249" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1379.510902] env[63379]: DEBUG oslo_vmware.api [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Task: {'id': task-1778915, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.523378] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1778916, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.093291} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.523625] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1379.526139] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57c2fc8d-855e-4ac8-a3ca-d28b67e73f52 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.553115] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Reconfiguring VM instance instance-00000005 to attach disk [datastore1] 0edadcca-042e-440b-985b-6338e20265fa/0edadcca-042e-440b-985b-6338e20265fa.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1379.554388] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-68d91d30-e4a4-4afb-8a9f-72723244937d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.577807] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Waiting for the task: (returnval){ [ 1379.577807] env[63379]: value = "task-1778917" [ 1379.577807] env[63379]: _type = "Task" [ 1379.577807] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.589838] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1778917, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.633727] env[63379]: DEBUG nova.network.neutron [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Updating instance_info_cache with network_info: [{"id": "a3f7ad68-1a71-4217-91b5-0d8a762a15c5", "address": "fa:16:3e:55:b2:ad", "network": {"id": "55f3848c-4d4f-4c83-a3e6-bc7a6f7af3ce", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.186", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eb95d75934bc4912a35f709406a98a65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3f7ad68-1a", "ovs_interfaceid": "a3f7ad68-1a71-4217-91b5-0d8a762a15c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1379.652258] env[63379]: DEBUG nova.compute.manager [req-81eca696-5002-4d43-b047-e6e046bc7985 req-540f35e7-4911-4c86-8cc4-5ea6a0a705c2 service nova] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Received event network-vif-plugged-a3f7ad68-1a71-4217-91b5-0d8a762a15c5 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1379.652258] env[63379]: DEBUG oslo_concurrency.lockutils [req-81eca696-5002-4d43-b047-e6e046bc7985 req-540f35e7-4911-4c86-8cc4-5ea6a0a705c2 service nova] Acquiring lock "55fb6899-0321-4bf2-bf3f-2e87dd479433-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1379.652258] env[63379]: DEBUG oslo_concurrency.lockutils [req-81eca696-5002-4d43-b047-e6e046bc7985 req-540f35e7-4911-4c86-8cc4-5ea6a0a705c2 service nova] Lock "55fb6899-0321-4bf2-bf3f-2e87dd479433-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1379.652258] env[63379]: DEBUG oslo_concurrency.lockutils [req-81eca696-5002-4d43-b047-e6e046bc7985 req-540f35e7-4911-4c86-8cc4-5ea6a0a705c2 service nova] Lock "55fb6899-0321-4bf2-bf3f-2e87dd479433-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1379.652572] env[63379]: DEBUG nova.compute.manager [req-81eca696-5002-4d43-b047-e6e046bc7985 req-540f35e7-4911-4c86-8cc4-5ea6a0a705c2 service nova] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] No waiting events found dispatching network-vif-plugged-a3f7ad68-1a71-4217-91b5-0d8a762a15c5 {{(pid=63379) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:320}} [ 1379.652997] env[63379]: WARNING nova.compute.manager [req-81eca696-5002-4d43-b047-e6e046bc7985 req-540f35e7-4911-4c86-8cc4-5ea6a0a705c2 service nova] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Received unexpected event network-vif-plugged-a3f7ad68-1a71-4217-91b5-0d8a762a15c5 for instance with vm_state building and task_state spawning. [ 1379.686713] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.449s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1379.687608] env[63379]: DEBUG nova.compute.manager [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1379.696651] env[63379]: DEBUG nova.compute.manager [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1379.769542] env[63379]: INFO nova.compute.manager [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Took 23.49 seconds to build instance. [ 1379.871665] env[63379]: INFO nova.compute.manager [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] Took 22.60 seconds to build instance. [ 1379.911960] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1778913, 'name': PowerOnVM_Task, 'duration_secs': 1.429784} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.912555] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1379.912936] env[63379]: INFO nova.compute.manager [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Took 13.38 seconds to spawn the instance on the hypervisor. 
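The lock bookkeeping that brackets these entries ("Acquiring lock ... by ...", "acquired ... waited 0.000s", "released ... held N.NNNs") is produced by oslo.concurrency's lockutils wrappers around the compute manager's build path and the resource tracker's claims. A minimal sketch of that usage, with illustrative lock names rather than nova's own, might look like this:

# Hedged sketch of the oslo.concurrency locking that emits the
# "Acquiring lock"/"acquired"/"released ... held" DEBUG lines above.
# The lock names and the guarded work are illustrative placeholders.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def claim_resources():
    # One caller at a time; the decorator logs the "acquired ... waited" and
    # "released ... held" timings seen above when DEBUG logging is enabled.
    pass

claim_resources()

# The underlying lock is also available as a context manager:
with lockutils.lock('example-image-cache-lock'):   # placeholder name
    # Critical section guarding a shared resource, comparable to the
    # per-datastore image-cache locks in this log.
    pass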
[ 1379.914676] env[63379]: DEBUG nova.compute.manager [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1379.914676] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ff7bc87-cf82-4945-92d9-77ffa937f2b5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.010354] env[63379]: DEBUG oslo_vmware.api [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Task: {'id': task-1778915, 'name': ReconfigVM_Task, 'duration_secs': 0.954867} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.010872] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Reconfigured VM instance instance-00000006 to attach disk [datastore1] a6f7c217-a493-403d-b776-870df4575f2a/a6f7c217-a493-403d-b776-870df4575f2a.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1380.012184] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ff90692c-e585-44be-bdc6-94d06a49db1d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.022967] env[63379]: DEBUG oslo_vmware.api [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Waiting for the task: (returnval){ [ 1380.022967] env[63379]: value = "task-1778918" [ 1380.022967] env[63379]: _type = "Task" [ 1380.022967] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.036763] env[63379]: DEBUG oslo_vmware.api [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Task: {'id': task-1778918, 'name': Rename_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.090882] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1778917, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.136244] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Releasing lock "refresh_cache-55fb6899-0321-4bf2-bf3f-2e87dd479433" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1380.136680] env[63379]: DEBUG nova.compute.manager [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Instance network_info: |[{"id": "a3f7ad68-1a71-4217-91b5-0d8a762a15c5", "address": "fa:16:3e:55:b2:ad", "network": {"id": "55f3848c-4d4f-4c83-a3e6-bc7a6f7af3ce", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.186", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eb95d75934bc4912a35f709406a98a65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3f7ad68-1a", "ovs_interfaceid": "a3f7ad68-1a71-4217-91b5-0d8a762a15c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1380.137468] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:55:b2:ad', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea00b53a-9c9b-4592-ab95-7e10473f338d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a3f7ad68-1a71-4217-91b5-0d8a762a15c5', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1380.146089] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Creating folder: Project (862e52cb5e924bbebb353a9ced8f5e80). Parent ref: group-v369214. 
{{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1380.146440] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b48a9b7e-d61b-405f-9c58-186e3be48be6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.161167] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Created folder: Project (862e52cb5e924bbebb353a9ced8f5e80) in parent group-v369214. [ 1380.161380] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Creating folder: Instances. Parent ref: group-v369233. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1380.161644] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a08aa32a-3644-4b2f-a8ba-af2069ef8033 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.197532] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Created folder: Instances in parent group-v369233. [ 1380.197774] env[63379]: DEBUG oslo.service.loopingcall [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1380.198518] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1380.198518] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7c4f011f-ac86-46dd-967e-dd2fdb4b4f19 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.222230] env[63379]: DEBUG nova.compute.utils [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1380.226384] env[63379]: DEBUG nova.compute.manager [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1380.226384] env[63379]: DEBUG nova.network.neutron [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1380.237164] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1380.237164] env[63379]: value = "task-1778921" [ 1380.237164] env[63379]: _type = "Task" [ 1380.237164] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.245481] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778921, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.250925] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1380.250925] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1380.251867] env[63379]: INFO nova.compute.claims [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1380.271107] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b09ca1d6-f51d-4304-bc75-b9ec1ed7a279 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Lock "724c7a22-1833-4dc5-ab38-a11498a83ab8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.002s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1380.371245] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Lock "c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.108s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1380.380368] env[63379]: DEBUG nova.policy [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6a5826f7214e4ba1b943c90dd6a3db55', 'user_domain_id': 'default', 
'system_scope': None, 'domain_id': None, 'project_id': '565c1cd0648646e19d73a6b1546ba28f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1380.439814] env[63379]: INFO nova.compute.manager [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Took 23.13 seconds to build instance. [ 1380.538759] env[63379]: DEBUG oslo_vmware.api [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Task: {'id': task-1778918, 'name': Rename_Task, 'duration_secs': 0.19919} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.538759] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1380.538759] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7927606e-a74f-412e-b651-5ca22c733255 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.546875] env[63379]: DEBUG oslo_vmware.api [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Waiting for the task: (returnval){ [ 1380.546875] env[63379]: value = "task-1778922" [ 1380.546875] env[63379]: _type = "Task" [ 1380.546875] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.560931] env[63379]: DEBUG oslo_vmware.api [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Task: {'id': task-1778922, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.593172] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1778917, 'name': ReconfigVM_Task, 'duration_secs': 0.592771} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.593172] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Reconfigured VM instance instance-00000005 to attach disk [datastore1] 0edadcca-042e-440b-985b-6338e20265fa/0edadcca-042e-440b-985b-6338e20265fa.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1380.593789] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f496f25c-fbb1-4105-b3d5-1a9d3032bb6f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.604542] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Waiting for the task: (returnval){ [ 1380.604542] env[63379]: value = "task-1778923" [ 1380.604542] env[63379]: _type = "Task" [ 1380.604542] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.619405] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1778923, 'name': Rename_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.729745] env[63379]: DEBUG nova.compute.manager [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1380.746684] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778921, 'name': CreateVM_Task, 'duration_secs': 0.498859} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.746874] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1380.747598] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1380.747770] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1380.748105] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1380.748409] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24dc36e0-6764-41fb-9a90-7118baf4c355 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.760305] env[63379]: DEBUG oslo_vmware.api [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Waiting for the task: (returnval){ [ 1380.760305] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52abc16e-8f05-30e8-9701-1b182451833e" [ 1380.760305] env[63379]: _type = "Task" [ 1380.760305] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.777718] env[63379]: DEBUG nova.compute.manager [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] [instance: ae565930-1bbc-4e75-bfc1-25dbcfd2e999] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1380.781710] env[63379]: DEBUG oslo_vmware.api [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52abc16e-8f05-30e8-9701-1b182451833e, 'name': SearchDatastore_Task, 'duration_secs': 0.012615} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.782320] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1380.782602] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1380.783599] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1380.783599] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1380.783599] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1380.784050] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-58824abd-d458-4b40-ad4c-5b2456674022 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.799290] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1380.799889] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1380.801358] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1df01c25-e589-44d9-a1e1-5d4e33dee264 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.817531] env[63379]: DEBUG oslo_vmware.api [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Waiting for the task: (returnval){ [ 1380.817531] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]527a20c0-73f1-4f6c-c5e1-6bb292c85068" [ 1380.817531] env[63379]: _type = "Task" [ 1380.817531] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.833632] env[63379]: DEBUG oslo_vmware.api [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]527a20c0-73f1-4f6c-c5e1-6bb292c85068, 'name': SearchDatastore_Task, 'duration_secs': 0.014164} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.834648] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca157157-75c6-49fa-8edf-21bc639747bc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.842981] env[63379]: DEBUG oslo_vmware.api [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Waiting for the task: (returnval){ [ 1380.842981] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5296b62b-46e4-65fa-db28-0b086b6d1b96" [ 1380.842981] env[63379]: _type = "Task" [ 1380.842981] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.857398] env[63379]: DEBUG oslo_vmware.api [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5296b62b-46e4-65fa-db28-0b086b6d1b96, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.857676] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1380.857938] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 55fb6899-0321-4bf2-bf3f-2e87dd479433/55fb6899-0321-4bf2-bf3f-2e87dd479433.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1380.858210] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1b960795-ba7f-4571-aa43-e84e67330233 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.869790] env[63379]: DEBUG oslo_vmware.api [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Waiting for the task: (returnval){ [ 1380.869790] env[63379]: value = "task-1778924" [ 1380.869790] env[63379]: _type = "Task" [ 1380.869790] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.875407] env[63379]: DEBUG nova.compute.manager [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1380.884045] env[63379]: DEBUG oslo_vmware.api [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Task: {'id': task-1778924, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.947486] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Lock "efc5b3b6-bed4-484c-8a0c-65810747382d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.651s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1381.060493] env[63379]: DEBUG oslo_vmware.api [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Task: {'id': task-1778922, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.115067] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1778923, 'name': Rename_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.171447] env[63379]: DEBUG nova.network.neutron [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Successfully created port: 775d733a-ad5b-4b39-aa69-8b4a577c4348 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1381.298991] env[63379]: DEBUG oslo_concurrency.lockutils [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1381.391574] env[63379]: DEBUG oslo_vmware.api [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Task: {'id': task-1778924, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.421436] env[63379]: DEBUG oslo_concurrency.lockutils [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1381.454665] env[63379]: DEBUG nova.compute.manager [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Starting instance... 
{{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1381.524783] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Acquiring lock "aaaf4b06-ef84-41ba-8054-29582854a9f1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1381.525090] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Lock "aaaf4b06-ef84-41ba-8054-29582854a9f1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1381.576600] env[63379]: DEBUG oslo_vmware.api [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Task: {'id': task-1778922, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.623122] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1778923, 'name': Rename_Task, 'duration_secs': 0.817877} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.626494] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1381.627121] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fdeea5b9-b88a-4ffc-bea5-53487e0bc26f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.640537] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Waiting for the task: (returnval){ [ 1381.640537] env[63379]: value = "task-1778925" [ 1381.640537] env[63379]: _type = "Task" [ 1381.640537] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.644831] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc111e12-432f-4c0d-8b86-d4657c07a25b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.659229] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1778925, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.660458] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c5c9cd5-dd43-4ed9-b09f-537a34b7d6fb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.709245] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b880500d-e4ce-4f4b-bd85-31162854909e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.721800] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-198ced58-41b7-42ee-9fc4-173997171b0e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.742233] env[63379]: DEBUG nova.compute.manager [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1381.745028] env[63379]: DEBUG nova.compute.provider_tree [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1381.791076] env[63379]: DEBUG nova.virt.hardware [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1381.791289] env[63379]: DEBUG nova.virt.hardware [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1381.791447] env[63379]: DEBUG nova.virt.hardware [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1381.791629] env[63379]: DEBUG nova.virt.hardware [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 
tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1381.791875] env[63379]: DEBUG nova.virt.hardware [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1381.792229] env[63379]: DEBUG nova.virt.hardware [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1381.792310] env[63379]: DEBUG nova.virt.hardware [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1381.792422] env[63379]: DEBUG nova.virt.hardware [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1381.792603] env[63379]: DEBUG nova.virt.hardware [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1381.792738] env[63379]: DEBUG nova.virt.hardware [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1381.792928] env[63379]: DEBUG nova.virt.hardware [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1381.794408] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a12cf80-ee3a-42b9-a112-31fbb58248af {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.808565] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f1a3e90-c2d5-47a8-8ff9-1b8429139ec7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.879863] env[63379]: DEBUG oslo_vmware.api [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Task: {'id': task-1778924, 'name': CopyVirtualDisk_Task, 
'duration_secs': 0.933282} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.880119] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 55fb6899-0321-4bf2-bf3f-2e87dd479433/55fb6899-0321-4bf2-bf3f-2e87dd479433.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1381.880325] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1381.880572] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b634d244-1a69-4a63-b895-07ca2893074c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.889321] env[63379]: DEBUG oslo_vmware.api [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Waiting for the task: (returnval){ [ 1381.889321] env[63379]: value = "task-1778926" [ 1381.889321] env[63379]: _type = "Task" [ 1381.889321] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.899646] env[63379]: DEBUG oslo_vmware.api [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Task: {'id': task-1778926, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.993022] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1382.067084] env[63379]: DEBUG oslo_vmware.api [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Task: {'id': task-1778922, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.070960] env[63379]: DEBUG oslo_concurrency.lockutils [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Acquiring lock "15d19ce3-ea71-47ff-a738-9ba00b8dfcf1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1382.071415] env[63379]: DEBUG oslo_concurrency.lockutils [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Lock "15d19ce3-ea71-47ff-a738-9ba00b8dfcf1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1382.133330] env[63379]: DEBUG nova.network.neutron [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Successfully updated port: b26a8dba-cd30-4320-901e-8e9a8584ea6f {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1382.151653] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1778925, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.249716] env[63379]: DEBUG nova.scheduler.client.report [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1382.285522] env[63379]: DEBUG nova.network.neutron [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Successfully updated port: e034314c-72fb-4187-9c6b-1cd2e95aa97a {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1382.401840] env[63379]: DEBUG oslo_vmware.api [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Task: {'id': task-1778926, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.118038} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.402564] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1382.403548] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05536118-ab37-41c9-9203-02b17687faa7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.434337] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Reconfiguring VM instance instance-00000007 to attach disk [datastore1] 55fb6899-0321-4bf2-bf3f-2e87dd479433/55fb6899-0321-4bf2-bf3f-2e87dd479433.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1382.435418] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-324d3c6c-e5db-485b-bcfa-2450431d043c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.462207] env[63379]: DEBUG oslo_vmware.api [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Waiting for the task: (returnval){ [ 1382.462207] env[63379]: value = "task-1778927" [ 1382.462207] env[63379]: _type = "Task" [ 1382.462207] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.473687] env[63379]: DEBUG oslo_vmware.api [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Task: {'id': task-1778927, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.569886] env[63379]: DEBUG oslo_vmware.api [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Task: {'id': task-1778922, 'name': PowerOnVM_Task, 'duration_secs': 1.692909} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.570411] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1382.570849] env[63379]: INFO nova.compute.manager [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Took 10.50 seconds to spawn the instance on the hypervisor. 
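The CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task and PowerOnVM_Task entries above all follow the same shape: the driver invokes a vSphere task, then the wait_for_task/_poll_task loop keeps re-reading the task state (the repeated PropertyCollector.RetrievePropertiesEx calls) and logging "progress is N%" until it completes. The following is a minimal, self-contained sketch of that poll loop only; FakeTask and wait_for_task here are illustrative stand-ins, not the oslo_vmware or vSphere API.

```python
import time


class FakeTask:
    """Stand-in for a vSphere task; the real loop re-reads Task.info via
    PropertyCollector.RetrievePropertiesEx, as the log lines above show."""

    def __init__(self, progress_steps):
        self._steps = iter(progress_steps)

    def poll(self):
        # Return (state, progress), mimicking the "progress is N%" entries.
        try:
            return "running", next(self._steps)
        except StopIteration:
            return "success", 100


def wait_for_task(task, interval=0.5):
    """Poll until the task reports success or error (sketch only)."""
    while True:
        state, progress = task.poll()
        print("Task progress is %d%%" % progress)
        if state == "success":
            return
        if state == "error":
            raise RuntimeError("task failed")
        time.sleep(interval)


# Progress values chosen to echo the percentages seen in the log above.
wait_for_task(FakeTask([0, 14, 66, 89]), interval=0.01)
```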
[ 1382.571243] env[63379]: DEBUG nova.compute.manager [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1382.572694] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a3df213-234b-4526-bf63-a22244b0180c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.656134] env[63379]: DEBUG oslo_vmware.api [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1778925, 'name': PowerOnVM_Task, 'duration_secs': 0.986853} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.656420] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1382.656725] env[63379]: INFO nova.compute.manager [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Took 12.89 seconds to spawn the instance on the hypervisor. [ 1382.656803] env[63379]: DEBUG nova.compute.manager [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1382.657648] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eee1708d-a98f-419b-8610-2d3a9dd43513 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.760971] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.510s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1382.761688] env[63379]: DEBUG nova.compute.manager [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1382.770179] env[63379]: DEBUG oslo_concurrency.lockutils [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.471s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1382.771945] env[63379]: INFO nova.compute.claims [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] [instance: ae565930-1bbc-4e75-bfc1-25dbcfd2e999] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1382.795183] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Acquiring lock "refresh_cache-aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1382.795183] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Acquired lock "refresh_cache-aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1382.795183] env[63379]: DEBUG nova.network.neutron [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1382.977516] env[63379]: DEBUG oslo_vmware.api [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Task: {'id': task-1778927, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.097392] env[63379]: INFO nova.compute.manager [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Took 24.99 seconds to build instance. [ 1383.188878] env[63379]: INFO nova.compute.manager [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Took 25.85 seconds to build instance. 
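The lockutils lines around the resource claim ('Acquiring lock "compute_resources"', 'acquired ... waited 1.471s', '"released" ... held 2.510s') come from a wrapper that times how long the caller waited for the lock and how long it held it. Below is a rough stdlib-only approximation of that pattern, assuming a plain threading.Lock in place of oslo.concurrency's internal locks; timed_lock is a hypothetical name, not the lockutils API.

```python
import contextlib
import threading
import time

_locks = {}                      # name -> threading.Lock, created lazily
_locks_guard = threading.Lock()  # protects the _locks registry itself


@contextlib.contextmanager
def timed_lock(name, caller):
    """Log acquire/release timings in the same spirit as the lines above."""
    with _locks_guard:
        lock = _locks.setdefault(name, threading.Lock())
    print('Acquiring lock "%s" by "%s"' % (name, caller))
    start = time.monotonic()
    lock.acquire()
    acquired = time.monotonic()
    print('Lock "%s" acquired by "%s" :: waited %.3fs'
          % (name, caller, acquired - start))
    try:
        yield
    finally:
        lock.release()
        print('Lock "%s" "released" by "%s" :: held %.3fs'
              % (name, caller, time.monotonic() - acquired))


with timed_lock("compute_resources", "ResourceTracker.instance_claim"):
    time.sleep(0.01)  # the claim/inventory work would happen here
```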
[ 1383.260990] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquiring lock "571bb238-9cf3-475e-b596-a9609acc8696" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1383.261270] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Lock "571bb238-9cf3-475e-b596-a9609acc8696" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1383.271634] env[63379]: DEBUG nova.compute.utils [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1383.274184] env[63379]: DEBUG nova.compute.manager [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1383.274228] env[63379]: DEBUG nova.network.neutron [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1383.301965] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquiring lock "d221329b-eee4-42f5-bb27-cf6af0386c04" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1383.302235] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Lock "d221329b-eee4-42f5-bb27-cf6af0386c04" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1383.374171] env[63379]: DEBUG nova.network.neutron [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Instance cache missing network info. 
{{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1383.447988] env[63379]: DEBUG nova.policy [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3e8c6799212b4e9a921e8288205c4536', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '98188c6c89a74f74980b83d84e621f15', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1383.475540] env[63379]: DEBUG oslo_vmware.api [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Task: {'id': task-1778927, 'name': ReconfigVM_Task, 'duration_secs': 0.569092} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.475817] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Reconfigured VM instance instance-00000007 to attach disk [datastore1] 55fb6899-0321-4bf2-bf3f-2e87dd479433/55fb6899-0321-4bf2-bf3f-2e87dd479433.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1383.476483] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-29735546-b78e-4b1b-9739-34c0ef7ec401 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.486022] env[63379]: DEBUG oslo_vmware.api [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Waiting for the task: (returnval){ [ 1383.486022] env[63379]: value = "task-1778928" [ 1383.486022] env[63379]: _type = "Task" [ 1383.486022] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.493775] env[63379]: DEBUG oslo_vmware.api [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Task: {'id': task-1778928, 'name': Rename_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.521255] env[63379]: DEBUG nova.network.neutron [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Successfully updated port: 775d733a-ad5b-4b39-aa69-8b4a577c4348 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1383.553067] env[63379]: DEBUG oslo_concurrency.lockutils [None req-90103b3d-09ab-4e62-9c6e-064ee6d479f3 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquiring lock "0aab61e4-c055-4872-973a-20fa6802ec10" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1383.553204] env[63379]: DEBUG oslo_concurrency.lockutils [None req-90103b3d-09ab-4e62-9c6e-064ee6d479f3 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "0aab61e4-c055-4872-973a-20fa6802ec10" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1383.553342] env[63379]: DEBUG nova.compute.manager [None req-90103b3d-09ab-4e62-9c6e-064ee6d479f3 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1383.554519] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51eec11d-ae37-4b0b-b6ec-07db71ec8870 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.562852] env[63379]: DEBUG nova.compute.manager [None req-90103b3d-09ab-4e62-9c6e-064ee6d479f3 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63379) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1383.563178] env[63379]: DEBUG nova.objects.instance [None req-90103b3d-09ab-4e62-9c6e-064ee6d479f3 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lazy-loading 'flavor' on Instance uuid 0aab61e4-c055-4872-973a-20fa6802ec10 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1383.601262] env[63379]: DEBUG oslo_concurrency.lockutils [None req-61c46af4-f815-45e1-b6b6-a1d4f5a06d94 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Lock "a6f7c217-a493-403d-b776-870df4575f2a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.498s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1383.690837] env[63379]: DEBUG oslo_concurrency.lockutils [None req-191173e7-e1af-488d-9f88-571ee522ff24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Lock "0edadcca-042e-440b-985b-6338e20265fa" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.358s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1383.730489] env[63379]: DEBUG nova.compute.manager [req-097c542e-c04c-4ec2-9652-acadf06f1357 req-a2d1bf35-f7c0-43e1-b714-54c65cf41923 service nova] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Received event network-vif-plugged-e034314c-72fb-4187-9c6b-1cd2e95aa97a {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1383.730679] env[63379]: DEBUG oslo_concurrency.lockutils [req-097c542e-c04c-4ec2-9652-acadf06f1357 req-a2d1bf35-f7c0-43e1-b714-54c65cf41923 service nova] Acquiring lock "aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1383.730973] env[63379]: DEBUG oslo_concurrency.lockutils [req-097c542e-c04c-4ec2-9652-acadf06f1357 req-a2d1bf35-f7c0-43e1-b714-54c65cf41923 service nova] Lock "aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1383.732392] env[63379]: DEBUG oslo_concurrency.lockutils [req-097c542e-c04c-4ec2-9652-acadf06f1357 req-a2d1bf35-f7c0-43e1-b714-54c65cf41923 service nova] Lock "aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1383.732642] env[63379]: DEBUG nova.compute.manager [req-097c542e-c04c-4ec2-9652-acadf06f1357 req-a2d1bf35-f7c0-43e1-b714-54c65cf41923 service nova] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] No waiting events found dispatching network-vif-plugged-e034314c-72fb-4187-9c6b-1cd2e95aa97a {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1383.733116] env[63379]: WARNING nova.compute.manager [req-097c542e-c04c-4ec2-9652-acadf06f1357 req-a2d1bf35-f7c0-43e1-b714-54c65cf41923 service nova] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Received unexpected event network-vif-plugged-e034314c-72fb-4187-9c6b-1cd2e95aa97a for instance with vm_state building and task_state spawning. 
[ 1383.751408] env[63379]: DEBUG nova.network.neutron [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Updating instance_info_cache with network_info: [{"id": "e034314c-72fb-4187-9c6b-1cd2e95aa97a", "address": "fa:16:3e:d2:92:4e", "network": {"id": "55f3848c-4d4f-4c83-a3e6-bc7a6f7af3ce", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.250", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eb95d75934bc4912a35f709406a98a65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape034314c-72", "ovs_interfaceid": "e034314c-72fb-4187-9c6b-1cd2e95aa97a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1383.759792] env[63379]: DEBUG nova.compute.manager [req-b59a48de-4566-4baa-a1e9-668cb5145f75 req-611a35c1-5bb5-4092-8d2b-442211421b24 service nova] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Received event network-changed-a3f7ad68-1a71-4217-91b5-0d8a762a15c5 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1383.759987] env[63379]: DEBUG nova.compute.manager [req-b59a48de-4566-4baa-a1e9-668cb5145f75 req-611a35c1-5bb5-4092-8d2b-442211421b24 service nova] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Refreshing instance network info cache due to event network-changed-a3f7ad68-1a71-4217-91b5-0d8a762a15c5. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1383.760219] env[63379]: DEBUG oslo_concurrency.lockutils [req-b59a48de-4566-4baa-a1e9-668cb5145f75 req-611a35c1-5bb5-4092-8d2b-442211421b24 service nova] Acquiring lock "refresh_cache-55fb6899-0321-4bf2-bf3f-2e87dd479433" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1383.760428] env[63379]: DEBUG oslo_concurrency.lockutils [req-b59a48de-4566-4baa-a1e9-668cb5145f75 req-611a35c1-5bb5-4092-8d2b-442211421b24 service nova] Acquired lock "refresh_cache-55fb6899-0321-4bf2-bf3f-2e87dd479433" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1383.760510] env[63379]: DEBUG nova.network.neutron [req-b59a48de-4566-4baa-a1e9-668cb5145f75 req-611a35c1-5bb5-4092-8d2b-442211421b24 service nova] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Refreshing network info cache for port a3f7ad68-1a71-4217-91b5-0d8a762a15c5 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1383.777828] env[63379]: DEBUG nova.compute.manager [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1383.905922] env[63379]: DEBUG nova.compute.manager [None req-808954b4-fa9c-4033-b577-da29f894dfa7 tempest-ServerDiagnosticsTest-668159827 tempest-ServerDiagnosticsTest-668159827-project-admin] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1383.906069] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fa511d6-f933-4c2b-a3f2-b68e8dab8e09 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.917882] env[63379]: INFO nova.compute.manager [None req-808954b4-fa9c-4033-b577-da29f894dfa7 tempest-ServerDiagnosticsTest-668159827 tempest-ServerDiagnosticsTest-668159827-project-admin] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Retrieving diagnostics [ 1383.918729] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6c5e423-0970-40c1-9730-239b58cacb49 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.004761] env[63379]: DEBUG oslo_vmware.api [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Task: {'id': task-1778928, 'name': Rename_Task, 'duration_secs': 0.305424} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.005481] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1384.005987] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f3cfcb27-9802-465a-99f8-549efb7a0f08 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.015271] env[63379]: DEBUG oslo_vmware.api [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Waiting for the task: (returnval){ [ 1384.015271] env[63379]: value = "task-1778929" [ 1384.015271] env[63379]: _type = "Task" [ 1384.015271] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.023927] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Acquiring lock "refresh_cache-d47be684-6cd8-45c6-8c6a-9a6db0390f97" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1384.024087] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Acquired lock "refresh_cache-d47be684-6cd8-45c6-8c6a-9a6db0390f97" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1384.024274] env[63379]: DEBUG nova.network.neutron [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1384.034108] env[63379]: DEBUG oslo_vmware.api [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Task: {'id': task-1778929, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.073471] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-90103b3d-09ab-4e62-9c6e-064ee6d479f3 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1384.073844] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-19edb8f6-2c32-471a-a1f9-cfbbed8f8f53 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.083653] env[63379]: DEBUG oslo_vmware.api [None req-90103b3d-09ab-4e62-9c6e-064ee6d479f3 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1384.083653] env[63379]: value = "task-1778930" [ 1384.083653] env[63379]: _type = "Task" [ 1384.083653] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.105896] env[63379]: DEBUG nova.compute.manager [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1384.111273] env[63379]: DEBUG oslo_vmware.api [None req-90103b3d-09ab-4e62-9c6e-064ee6d479f3 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1778930, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.195748] env[63379]: DEBUG nova.compute.manager [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Starting instance... 
{{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1384.203573] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-594a04a6-0059-4047-bf12-cb8eedd510ff {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.206711] env[63379]: DEBUG nova.network.neutron [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Successfully created port: a7d101c2-09da-4502-aa7a-988de81f6ee7 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1384.215881] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-082e7417-4afd-4dc3-8e9d-87c0f2217eda {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.254382] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Releasing lock "refresh_cache-aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1384.254688] env[63379]: DEBUG nova.compute.manager [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Instance network_info: |[{"id": "e034314c-72fb-4187-9c6b-1cd2e95aa97a", "address": "fa:16:3e:d2:92:4e", "network": {"id": "55f3848c-4d4f-4c83-a3e6-bc7a6f7af3ce", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.250", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eb95d75934bc4912a35f709406a98a65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape034314c-72", "ovs_interfaceid": "e034314c-72fb-4187-9c6b-1cd2e95aa97a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1384.255565] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad9f9f58-bd97-4dbe-9f60-f20d027332ec {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.258971] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d2:92:4e', 'network_ref': {'type': 'OpaqueNetwork', 
'network-id': 'ea00b53a-9c9b-4592-ab95-7e10473f338d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e034314c-72fb-4187-9c6b-1cd2e95aa97a', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1384.266845] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Creating folder: Project (551ba9289da4445ea0bad784aee2e86d). Parent ref: group-v369214. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1384.270047] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f9c403a6-aa73-4a16-a379-9026df26b859 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.277428] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b28fc366-b094-431d-8343-d01d88c7e3fb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.284311] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Created folder: Project (551ba9289da4445ea0bad784aee2e86d) in parent group-v369214. [ 1384.284677] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Creating folder: Instances. Parent ref: group-v369236. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1384.288820] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bfad3003-f427-4a50-a752-c32d3c3374b3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.300767] env[63379]: DEBUG nova.compute.provider_tree [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1384.314006] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Created folder: Instances in parent group-v369236. [ 1384.314281] env[63379]: DEBUG oslo.service.loopingcall [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1384.314520] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1384.315351] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-08b86f08-9525-430b-9a34-358e2bbabdf0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.336937] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1384.336937] env[63379]: value = "task-1778933" [ 1384.336937] env[63379]: _type = "Task" [ 1384.336937] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.346932] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778933, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.537459] env[63379]: DEBUG oslo_vmware.api [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Task: {'id': task-1778929, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.601065] env[63379]: DEBUG oslo_vmware.api [None req-90103b3d-09ab-4e62-9c6e-064ee6d479f3 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1778930, 'name': PowerOffVM_Task, 'duration_secs': 0.317776} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.603977] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-90103b3d-09ab-4e62-9c6e-064ee6d479f3 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1384.605538] env[63379]: DEBUG nova.compute.manager [None req-90103b3d-09ab-4e62-9c6e-064ee6d479f3 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1384.605538] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8534689c-e7c7-4cec-8db5-54da420c6e73 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.609238] env[63379]: DEBUG nova.network.neutron [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Instance cache missing network info. 
{{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1384.640716] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1384.731722] env[63379]: DEBUG oslo_concurrency.lockutils [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1384.793213] env[63379]: DEBUG nova.compute.manager [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1384.804928] env[63379]: DEBUG nova.scheduler.client.report [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1384.845191] env[63379]: DEBUG nova.virt.hardware [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1384.845879] env[63379]: DEBUG nova.virt.hardware [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1384.846127] env[63379]: DEBUG nova.virt.hardware [None 
req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1384.847063] env[63379]: DEBUG nova.virt.hardware [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1384.847063] env[63379]: DEBUG nova.virt.hardware [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1384.847063] env[63379]: DEBUG nova.virt.hardware [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1384.847217] env[63379]: DEBUG nova.virt.hardware [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1384.847915] env[63379]: DEBUG nova.virt.hardware [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1384.848144] env[63379]: DEBUG nova.virt.hardware [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1384.848413] env[63379]: DEBUG nova.virt.hardware [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1384.848561] env[63379]: DEBUG nova.virt.hardware [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1384.850172] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-878e12ca-6ff2-46db-8f86-d87f2b1039ab {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.864858] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778933, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.867687] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67f6ac26-1fb4-448b-8c25-7238d52b9981 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.989256] env[63379]: DEBUG nova.network.neutron [req-b59a48de-4566-4baa-a1e9-668cb5145f75 req-611a35c1-5bb5-4092-8d2b-442211421b24 service nova] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Updated VIF entry in instance network info cache for port a3f7ad68-1a71-4217-91b5-0d8a762a15c5. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1384.989256] env[63379]: DEBUG nova.network.neutron [req-b59a48de-4566-4baa-a1e9-668cb5145f75 req-611a35c1-5bb5-4092-8d2b-442211421b24 service nova] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Updating instance_info_cache with network_info: [{"id": "a3f7ad68-1a71-4217-91b5-0d8a762a15c5", "address": "fa:16:3e:55:b2:ad", "network": {"id": "55f3848c-4d4f-4c83-a3e6-bc7a6f7af3ce", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.186", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eb95d75934bc4912a35f709406a98a65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3f7ad68-1a", "ovs_interfaceid": "a3f7ad68-1a71-4217-91b5-0d8a762a15c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1385.014010] env[63379]: DEBUG nova.network.neutron [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Updating instance_info_cache with network_info: [{"id": "775d733a-ad5b-4b39-aa69-8b4a577c4348", "address": "fa:16:3e:a6:1c:0d", "network": {"id": "199703e5-fc18-408d-a14c-e309d6e3fb78", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1056046585-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "565c1cd0648646e19d73a6b1546ba28f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4d548e7-d762-406a-bb2d-dc7168a8ca67", "external-id": "nsx-vlan-transportzone-796", "segmentation_id": 796, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap775d733a-ad", "ovs_interfaceid": 
"775d733a-ad5b-4b39-aa69-8b4a577c4348", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1385.031550] env[63379]: DEBUG oslo_vmware.api [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Task: {'id': task-1778929, 'name': PowerOnVM_Task, 'duration_secs': 0.87332} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.031812] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1385.033736] env[63379]: INFO nova.compute.manager [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Took 10.66 seconds to spawn the instance on the hypervisor. [ 1385.033736] env[63379]: DEBUG nova.compute.manager [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1385.036221] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97775593-e435-4c13-b409-577d217fef89 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.038867] env[63379]: DEBUG nova.network.neutron [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Successfully updated port: cb52a59c-c52f-446e-b305-8cbd08c646d1 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1385.128219] env[63379]: DEBUG oslo_concurrency.lockutils [None req-90103b3d-09ab-4e62-9c6e-064ee6d479f3 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "0aab61e4-c055-4872-973a-20fa6802ec10" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.575s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1385.312676] env[63379]: DEBUG oslo_concurrency.lockutils [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.542s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1385.313261] env[63379]: DEBUG nova.compute.manager [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] [instance: ae565930-1bbc-4e75-bfc1-25dbcfd2e999] Start building networks 
asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1385.319331] env[63379]: DEBUG oslo_concurrency.lockutils [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.898s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1385.325906] env[63379]: INFO nova.compute.claims [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1385.352172] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778933, 'name': CreateVM_Task, 'duration_secs': 0.525938} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.352358] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1385.353206] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1385.353206] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1385.353592] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1385.355021] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2f5fd30-1a0b-4dc2-acb0-69ffdc83059c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.362641] env[63379]: DEBUG oslo_vmware.api [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Waiting for the task: (returnval){ [ 1385.362641] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]527504f6-8b28-bd3c-a926-bff149b230d2" [ 1385.362641] env[63379]: _type = "Task" [ 1385.362641] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.375167] env[63379]: DEBUG oslo_vmware.api [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]527504f6-8b28-bd3c-a926-bff149b230d2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.493033] env[63379]: DEBUG oslo_concurrency.lockutils [req-b59a48de-4566-4baa-a1e9-668cb5145f75 req-611a35c1-5bb5-4092-8d2b-442211421b24 service nova] Releasing lock "refresh_cache-55fb6899-0321-4bf2-bf3f-2e87dd479433" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1385.520153] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Releasing lock "refresh_cache-d47be684-6cd8-45c6-8c6a-9a6db0390f97" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1385.520546] env[63379]: DEBUG nova.compute.manager [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Instance network_info: |[{"id": "775d733a-ad5b-4b39-aa69-8b4a577c4348", "address": "fa:16:3e:a6:1c:0d", "network": {"id": "199703e5-fc18-408d-a14c-e309d6e3fb78", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1056046585-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "565c1cd0648646e19d73a6b1546ba28f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4d548e7-d762-406a-bb2d-dc7168a8ca67", "external-id": "nsx-vlan-transportzone-796", "segmentation_id": 796, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap775d733a-ad", "ovs_interfaceid": "775d733a-ad5b-4b39-aa69-8b4a577c4348", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1385.521053] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a6:1c:0d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b4d548e7-d762-406a-bb2d-dc7168a8ca67', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '775d733a-ad5b-4b39-aa69-8b4a577c4348', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1385.532218] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Creating folder: Project (565c1cd0648646e19d73a6b1546ba28f). Parent ref: group-v369214. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1385.533037] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8540b282-d7b6-44c2-b6e0-c2cd287a7be5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.547829] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Created folder: Project (565c1cd0648646e19d73a6b1546ba28f) in parent group-v369214. [ 1385.548108] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Creating folder: Instances. Parent ref: group-v369239. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1385.548322] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f6e94a86-7053-4e7a-9c14-a194384fc54d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.563393] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Created folder: Instances in parent group-v369239. [ 1385.563675] env[63379]: DEBUG oslo.service.loopingcall [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1385.566805] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1385.566805] env[63379]: INFO nova.compute.manager [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Took 26.71 seconds to build instance. [ 1385.567757] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bdcef6a6-71f0-432b-afc2-f35072df0627 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.590825] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1385.590825] env[63379]: value = "task-1778936" [ 1385.590825] env[63379]: _type = "Task" [ 1385.590825] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.600080] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778936, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.832042] env[63379]: DEBUG nova.compute.utils [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1385.837097] env[63379]: DEBUG nova.compute.manager [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] [instance: ae565930-1bbc-4e75-bfc1-25dbcfd2e999] Not allocating networking since 'none' was specified. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1385.877810] env[63379]: DEBUG oslo_concurrency.lockutils [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Acquiring lock "6b4e80fc-582f-432b-aa99-ec133127578e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1385.878884] env[63379]: DEBUG oslo_concurrency.lockutils [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Lock "6b4e80fc-582f-432b-aa99-ec133127578e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1385.885716] env[63379]: DEBUG oslo_vmware.api [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]527504f6-8b28-bd3c-a926-bff149b230d2, 'name': SearchDatastore_Task, 'duration_secs': 0.015086} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.886305] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1385.886305] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1385.886883] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1385.886883] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1385.886883] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1385.887352] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9d869c59-cf04-4166-b5dc-682ceaa9d625 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.900865] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1385.901140] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1385.904436] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4e19fbc-3d53-479d-ac89-710a7b2c06d6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.914188] env[63379]: DEBUG oslo_vmware.api [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Waiting for the task: (returnval){ [ 1385.914188] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52667ff0-a9c4-f43a-9869-70db64a80832" [ 1385.914188] env[63379]: _type = "Task" [ 1385.914188] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.923545] env[63379]: DEBUG oslo_vmware.api [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52667ff0-a9c4-f43a-9869-70db64a80832, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.956691] env[63379]: DEBUG oslo_concurrency.lockutils [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Acquiring lock "de671ba9-0d86-4f89-a6bd-ecea9ad0ba85" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1385.956920] env[63379]: DEBUG oslo_concurrency.lockutils [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Lock "de671ba9-0d86-4f89-a6bd-ecea9ad0ba85" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1386.085429] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f4392fe1-a1c7-4621-b10f-ced79c498e57 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Lock "55fb6899-0321-4bf2-bf3f-2e87dd479433" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.235s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1386.105888] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778936, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.338103] env[63379]: DEBUG nova.compute.manager [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] [instance: ae565930-1bbc-4e75-bfc1-25dbcfd2e999] Start building block device mappings for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1386.438916] env[63379]: DEBUG oslo_vmware.api [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52667ff0-a9c4-f43a-9869-70db64a80832, 'name': SearchDatastore_Task, 'duration_secs': 0.018195} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.439624] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46cfbbb1-b01e-4540-a835-edc3bea0e641 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.451720] env[63379]: DEBUG oslo_vmware.api [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Waiting for the task: (returnval){ [ 1386.451720] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52ea4302-81b6-6600-7dd5-98314d5b26b3" [ 1386.451720] env[63379]: _type = "Task" [ 1386.451720] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.467110] env[63379]: DEBUG oslo_vmware.api [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52ea4302-81b6-6600-7dd5-98314d5b26b3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.594247] env[63379]: DEBUG nova.compute.manager [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1386.608174] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778936, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.635765] env[63379]: DEBUG nova.network.neutron [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Successfully updated port: a7d101c2-09da-4502-aa7a-988de81f6ee7 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1386.701982] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efe63bb9-7f78-4333-97d7-858a28f1e0ee {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.712408] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-529ec67f-d733-4f18-af96-f73a25e1d7bc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.753325] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-729ef679-e646-466e-9d02-a3aada8058b2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.765725] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a783237a-5aa0-4980-8886-fe00e2ae8511 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.783847] env[63379]: DEBUG nova.compute.provider_tree [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1386.901822] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c7ff48fb-4913-4494-becc-b77f512d1ef8 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Acquiring lock "724c7a22-1833-4dc5-ab38-a11498a83ab8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1386.901974] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c7ff48fb-4913-4494-becc-b77f512d1ef8 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Lock "724c7a22-1833-4dc5-ab38-a11498a83ab8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1386.902213] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c7ff48fb-4913-4494-becc-b77f512d1ef8 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Acquiring lock "724c7a22-1833-4dc5-ab38-a11498a83ab8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1386.902802] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c7ff48fb-4913-4494-becc-b77f512d1ef8 tempest-ServerDiagnosticsTest-1790319842 
tempest-ServerDiagnosticsTest-1790319842-project-member] Lock "724c7a22-1833-4dc5-ab38-a11498a83ab8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1386.902802] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c7ff48fb-4913-4494-becc-b77f512d1ef8 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Lock "724c7a22-1833-4dc5-ab38-a11498a83ab8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1386.905251] env[63379]: INFO nova.compute.manager [None req-c7ff48fb-4913-4494-becc-b77f512d1ef8 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Terminating instance [ 1386.910740] env[63379]: DEBUG nova.compute.manager [None req-c7ff48fb-4913-4494-becc-b77f512d1ef8 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1386.910740] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c7ff48fb-4913-4494-becc-b77f512d1ef8 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1386.910740] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7009c4f-f5ef-4150-b459-9d8113bc4cdf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.924478] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7ff48fb-4913-4494-becc-b77f512d1ef8 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1386.924836] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ce6797e5-9950-4046-8160-d371e2aa40e0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.937368] env[63379]: DEBUG oslo_vmware.api [None req-c7ff48fb-4913-4494-becc-b77f512d1ef8 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Waiting for the task: (returnval){ [ 1386.937368] env[63379]: value = "task-1778937" [ 1386.937368] env[63379]: _type = "Task" [ 1386.937368] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.950647] env[63379]: DEBUG oslo_vmware.api [None req-c7ff48fb-4913-4494-becc-b77f512d1ef8 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Task: {'id': task-1778937, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.963494] env[63379]: DEBUG oslo_vmware.api [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52ea4302-81b6-6600-7dd5-98314d5b26b3, 'name': SearchDatastore_Task, 'duration_secs': 0.028099} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.963764] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1386.964395] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae/aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1386.964395] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1ff5a11b-5ff6-4996-8368-0323272d0c21 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.972937] env[63379]: DEBUG oslo_vmware.api [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Waiting for the task: (returnval){ [ 1386.972937] env[63379]: value = "task-1778938" [ 1386.972937] env[63379]: _type = "Task" [ 1386.972937] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.988469] env[63379]: DEBUG oslo_vmware.api [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1778938, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.055597] env[63379]: DEBUG oslo_concurrency.lockutils [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Acquiring lock "c999d64e-3f5b-4854-8b92-6d0d17f49dd7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1387.055936] env[63379]: DEBUG oslo_concurrency.lockutils [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Lock "c999d64e-3f5b-4854-8b92-6d0d17f49dd7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1387.107611] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778936, 'name': CreateVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.128744] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1387.141742] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Acquiring lock "refresh_cache-5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1387.141970] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Acquired lock "refresh_cache-5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1387.142175] env[63379]: DEBUG nova.network.neutron [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1387.287908] env[63379]: DEBUG nova.scheduler.client.report [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1387.355453] env[63379]: DEBUG nova.compute.manager [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] [instance: ae565930-1bbc-4e75-bfc1-25dbcfd2e999] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1387.358927] env[63379]: DEBUG nova.network.neutron [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Successfully updated port: c856b8f8-3490-43b2-b2c2-b96a5c3e550e {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1387.383096] env[63379]: DEBUG nova.virt.hardware [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1387.383659] env[63379]: DEBUG nova.virt.hardware [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1387.383659] env[63379]: DEBUG nova.virt.hardware [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1387.383851] env[63379]: DEBUG nova.virt.hardware [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1387.384049] env[63379]: DEBUG nova.virt.hardware [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1387.384235] env[63379]: DEBUG nova.virt.hardware [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1387.384480] env[63379]: DEBUG nova.virt.hardware [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1387.384672] env[63379]: DEBUG nova.virt.hardware [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1387.384870] env[63379]: DEBUG nova.virt.hardware [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1387.385208] env[63379]: DEBUG nova.virt.hardware [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1387.385464] env[63379]: DEBUG nova.virt.hardware [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1387.387079] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa55a5c5-a2c3-402a-a118-80b3e2938459 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.398347] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-001167d2-994d-4366-9d9e-9fd496841f73 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.423064] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] [instance: ae565930-1bbc-4e75-bfc1-25dbcfd2e999] Instance VIF info [] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1387.429943] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Creating folder: Project (e7f0f3fb3268498b9184c4f40a7162cb). Parent ref: group-v369214. 
{{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1387.430768] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d3fbaecf-cd52-470b-ad3d-dada15214602 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.446517] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Created folder: Project (e7f0f3fb3268498b9184c4f40a7162cb) in parent group-v369214. [ 1387.446794] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Creating folder: Instances. Parent ref: group-v369242. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1387.448516] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a6794426-b9e8-43c0-aa73-31f6d37c866d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.452540] env[63379]: DEBUG nova.compute.manager [req-7f87b4af-b595-46a3-a99c-6f84961fd7a5 req-1c18e078-1fc8-4210-af49-298975243e6e service nova] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Received event network-changed-e034314c-72fb-4187-9c6b-1cd2e95aa97a {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1387.452809] env[63379]: DEBUG nova.compute.manager [req-7f87b4af-b595-46a3-a99c-6f84961fd7a5 req-1c18e078-1fc8-4210-af49-298975243e6e service nova] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Refreshing instance network info cache due to event network-changed-e034314c-72fb-4187-9c6b-1cd2e95aa97a. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1387.453120] env[63379]: DEBUG oslo_concurrency.lockutils [req-7f87b4af-b595-46a3-a99c-6f84961fd7a5 req-1c18e078-1fc8-4210-af49-298975243e6e service nova] Acquiring lock "refresh_cache-aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1387.453383] env[63379]: DEBUG oslo_concurrency.lockutils [req-7f87b4af-b595-46a3-a99c-6f84961fd7a5 req-1c18e078-1fc8-4210-af49-298975243e6e service nova] Acquired lock "refresh_cache-aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1387.453639] env[63379]: DEBUG nova.network.neutron [req-7f87b4af-b595-46a3-a99c-6f84961fd7a5 req-1c18e078-1fc8-4210-af49-298975243e6e service nova] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Refreshing network info cache for port e034314c-72fb-4187-9c6b-1cd2e95aa97a {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1387.458608] env[63379]: DEBUG oslo_vmware.api [None req-c7ff48fb-4913-4494-becc-b77f512d1ef8 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Task: {'id': task-1778937, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.472609] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Created folder: Instances in parent group-v369242. [ 1387.473093] env[63379]: DEBUG oslo.service.loopingcall [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1387.475560] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ae565930-1bbc-4e75-bfc1-25dbcfd2e999] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1387.477497] env[63379]: DEBUG nova.compute.manager [req-f45c1bbe-c05f-45b4-a3cf-444f27efbd8a req-6edf1171-b051-4886-af88-aa7a2412ace5 service nova] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Received event network-vif-plugged-b26a8dba-cd30-4320-901e-8e9a8584ea6f {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1387.477799] env[63379]: DEBUG oslo_concurrency.lockutils [req-f45c1bbe-c05f-45b4-a3cf-444f27efbd8a req-6edf1171-b051-4886-af88-aa7a2412ace5 service nova] Acquiring lock "30908171-e1b9-4e20-830e-419ff6d9a0fa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1387.478049] env[63379]: DEBUG oslo_concurrency.lockutils [req-f45c1bbe-c05f-45b4-a3cf-444f27efbd8a req-6edf1171-b051-4886-af88-aa7a2412ace5 service nova] Lock "30908171-e1b9-4e20-830e-419ff6d9a0fa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1387.478284] env[63379]: DEBUG oslo_concurrency.lockutils [req-f45c1bbe-c05f-45b4-a3cf-444f27efbd8a req-6edf1171-b051-4886-af88-aa7a2412ace5 service nova] Lock "30908171-e1b9-4e20-830e-419ff6d9a0fa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1387.478661] env[63379]: DEBUG nova.compute.manager [req-f45c1bbe-c05f-45b4-a3cf-444f27efbd8a req-6edf1171-b051-4886-af88-aa7a2412ace5 service nova] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] No waiting events found dispatching network-vif-plugged-b26a8dba-cd30-4320-901e-8e9a8584ea6f {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1387.478661] env[63379]: WARNING nova.compute.manager [req-f45c1bbe-c05f-45b4-a3cf-444f27efbd8a req-6edf1171-b051-4886-af88-aa7a2412ace5 service nova] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Received unexpected event network-vif-plugged-b26a8dba-cd30-4320-901e-8e9a8584ea6f for instance with vm_state building and task_state spawning. 
[ 1387.478884] env[63379]: DEBUG nova.compute.manager [req-f45c1bbe-c05f-45b4-a3cf-444f27efbd8a req-6edf1171-b051-4886-af88-aa7a2412ace5 service nova] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Received event network-changed-b26a8dba-cd30-4320-901e-8e9a8584ea6f {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1387.479062] env[63379]: DEBUG nova.compute.manager [req-f45c1bbe-c05f-45b4-a3cf-444f27efbd8a req-6edf1171-b051-4886-af88-aa7a2412ace5 service nova] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Refreshing instance network info cache due to event network-changed-b26a8dba-cd30-4320-901e-8e9a8584ea6f. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1387.479287] env[63379]: DEBUG oslo_concurrency.lockutils [req-f45c1bbe-c05f-45b4-a3cf-444f27efbd8a req-6edf1171-b051-4886-af88-aa7a2412ace5 service nova] Acquiring lock "refresh_cache-30908171-e1b9-4e20-830e-419ff6d9a0fa" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1387.479449] env[63379]: DEBUG oslo_concurrency.lockutils [req-f45c1bbe-c05f-45b4-a3cf-444f27efbd8a req-6edf1171-b051-4886-af88-aa7a2412ace5 service nova] Acquired lock "refresh_cache-30908171-e1b9-4e20-830e-419ff6d9a0fa" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1387.479666] env[63379]: DEBUG nova.network.neutron [req-f45c1bbe-c05f-45b4-a3cf-444f27efbd8a req-6edf1171-b051-4886-af88-aa7a2412ace5 service nova] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Refreshing network info cache for port b26a8dba-cd30-4320-901e-8e9a8584ea6f {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1387.481698] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d3be948f-da05-455c-8d05-6b9abe11e005 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.513327] env[63379]: DEBUG oslo_vmware.api [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1778938, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.514972] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1387.514972] env[63379]: value = "task-1778941" [ 1387.514972] env[63379]: _type = "Task" [ 1387.514972] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.529663] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778941, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.554518] env[63379]: DEBUG nova.network.neutron [req-f45c1bbe-c05f-45b4-a3cf-444f27efbd8a req-6edf1171-b051-4886-af88-aa7a2412ace5 service nova] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1387.616177] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778936, 'name': CreateVM_Task, 'duration_secs': 1.991229} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.616177] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1387.617609] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1387.617609] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1387.617609] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1387.618391] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-428c110f-8aed-4c95-b898-64aba2424f36 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.633028] env[63379]: DEBUG oslo_vmware.api [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Waiting for the task: (returnval){ [ 1387.633028] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5287276c-7f60-6a18-9015-5e03b5fea792" [ 1387.633028] env[63379]: _type = "Task" [ 1387.633028] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.648390] env[63379]: DEBUG oslo_vmware.api [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5287276c-7f60-6a18-9015-5e03b5fea792, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.688957] env[63379]: DEBUG nova.network.neutron [req-f45c1bbe-c05f-45b4-a3cf-444f27efbd8a req-6edf1171-b051-4886-af88-aa7a2412ace5 service nova] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1387.722931] env[63379]: DEBUG nova.network.neutron [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Instance cache missing network info. 
{{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1387.750986] env[63379]: INFO nova.compute.manager [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Rebuilding instance [ 1387.794917] env[63379]: DEBUG oslo_concurrency.lockutils [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.476s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1387.795429] env[63379]: DEBUG nova.compute.manager [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1387.798213] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.805s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1387.799758] env[63379]: INFO nova.compute.claims [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1387.828037] env[63379]: DEBUG nova.compute.manager [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1387.828478] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60883616-f4d8-4803-b471-5a875a0953dc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.865478] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Acquiring lock "refresh_cache-30908171-e1b9-4e20-830e-419ff6d9a0fa" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1387.947723] env[63379]: DEBUG nova.network.neutron [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Updating instance_info_cache with network_info: [{"id": "a7d101c2-09da-4502-aa7a-988de81f6ee7", "address": "fa:16:3e:dd:9a:15", "network": {"id": "55f3848c-4d4f-4c83-a3e6-bc7a6f7af3ce", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": 
"192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.244", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eb95d75934bc4912a35f709406a98a65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7d101c2-09", "ovs_interfaceid": "a7d101c2-09da-4502-aa7a-988de81f6ee7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1387.955338] env[63379]: DEBUG oslo_vmware.api [None req-c7ff48fb-4913-4494-becc-b77f512d1ef8 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Task: {'id': task-1778937, 'name': PowerOffVM_Task, 'duration_secs': 0.780288} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.955597] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7ff48fb-4913-4494-becc-b77f512d1ef8 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1387.955765] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c7ff48fb-4913-4494-becc-b77f512d1ef8 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1387.956018] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5e5d624f-8ee8-4cae-be37-2b24eb007d6c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.987395] env[63379]: DEBUG oslo_vmware.api [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1778938, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.703154} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.988048] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae/aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1387.988177] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1387.988492] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a7a6c7be-62f9-45e2-8c70-b07ce51ff7eb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.996557] env[63379]: DEBUG oslo_vmware.api [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Waiting for the task: (returnval){ [ 1387.996557] env[63379]: value = "task-1778943" [ 1387.996557] env[63379]: _type = "Task" [ 1387.996557] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1388.008803] env[63379]: DEBUG oslo_vmware.api [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1778943, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.027131] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778941, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.109321] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c7ff48fb-4913-4494-becc-b77f512d1ef8 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1388.109702] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c7ff48fb-4913-4494-becc-b77f512d1ef8 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1388.110542] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7ff48fb-4913-4494-becc-b77f512d1ef8 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Deleting the datastore file [datastore1] 724c7a22-1833-4dc5-ab38-a11498a83ab8 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1388.110542] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5d2f2638-b025-4c41-b6d9-b5fcd06ead01 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.119805] env[63379]: DEBUG oslo_vmware.api [None req-c7ff48fb-4913-4494-becc-b77f512d1ef8 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Waiting for the task: (returnval){ [ 1388.119805] env[63379]: value = "task-1778944" [ 1388.119805] env[63379]: _type = "Task" [ 1388.119805] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1388.132661] env[63379]: DEBUG oslo_vmware.api [None req-c7ff48fb-4913-4494-becc-b77f512d1ef8 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Task: {'id': task-1778944, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.143054] env[63379]: DEBUG oslo_vmware.api [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5287276c-7f60-6a18-9015-5e03b5fea792, 'name': SearchDatastore_Task, 'duration_secs': 0.032798} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1388.143390] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1388.143627] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1388.143878] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1388.144050] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1388.144239] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1388.144494] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-12baa25b-4a0e-4413-a9e8-5af55c88536a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.157294] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1388.157415] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1388.158570] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a444d0f9-4c39-4537-aef3-6d418cc58aa1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.165715] env[63379]: DEBUG oslo_vmware.api [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Waiting for the task: (returnval){ [ 1388.165715] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e30cdc-cda8-b882-28c2-b65a7fab5195" [ 1388.165715] env[63379]: _type = "Task" [ 1388.165715] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1388.176711] env[63379]: DEBUG oslo_vmware.api [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e30cdc-cda8-b882-28c2-b65a7fab5195, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.192265] env[63379]: DEBUG oslo_concurrency.lockutils [req-f45c1bbe-c05f-45b4-a3cf-444f27efbd8a req-6edf1171-b051-4886-af88-aa7a2412ace5 service nova] Releasing lock "refresh_cache-30908171-e1b9-4e20-830e-419ff6d9a0fa" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1388.192703] env[63379]: DEBUG nova.compute.manager [req-f45c1bbe-c05f-45b4-a3cf-444f27efbd8a req-6edf1171-b051-4886-af88-aa7a2412ace5 service nova] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Received event network-vif-plugged-775d733a-ad5b-4b39-aa69-8b4a577c4348 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1388.192810] env[63379]: DEBUG oslo_concurrency.lockutils [req-f45c1bbe-c05f-45b4-a3cf-444f27efbd8a req-6edf1171-b051-4886-af88-aa7a2412ace5 service nova] Acquiring lock "d47be684-6cd8-45c6-8c6a-9a6db0390f97-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1388.192960] env[63379]: DEBUG oslo_concurrency.lockutils [req-f45c1bbe-c05f-45b4-a3cf-444f27efbd8a req-6edf1171-b051-4886-af88-aa7a2412ace5 service nova] Lock "d47be684-6cd8-45c6-8c6a-9a6db0390f97-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1388.193137] env[63379]: DEBUG oslo_concurrency.lockutils [req-f45c1bbe-c05f-45b4-a3cf-444f27efbd8a req-6edf1171-b051-4886-af88-aa7a2412ace5 service nova] Lock "d47be684-6cd8-45c6-8c6a-9a6db0390f97-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1388.193299] env[63379]: DEBUG nova.compute.manager [req-f45c1bbe-c05f-45b4-a3cf-444f27efbd8a req-6edf1171-b051-4886-af88-aa7a2412ace5 service nova] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] No waiting events found dispatching 
network-vif-plugged-775d733a-ad5b-4b39-aa69-8b4a577c4348 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1388.193488] env[63379]: WARNING nova.compute.manager [req-f45c1bbe-c05f-45b4-a3cf-444f27efbd8a req-6edf1171-b051-4886-af88-aa7a2412ace5 service nova] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Received unexpected event network-vif-plugged-775d733a-ad5b-4b39-aa69-8b4a577c4348 for instance with vm_state building and task_state spawning. [ 1388.193671] env[63379]: DEBUG nova.compute.manager [req-f45c1bbe-c05f-45b4-a3cf-444f27efbd8a req-6edf1171-b051-4886-af88-aa7a2412ace5 service nova] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Received event network-changed-775d733a-ad5b-4b39-aa69-8b4a577c4348 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1388.194017] env[63379]: DEBUG nova.compute.manager [req-f45c1bbe-c05f-45b4-a3cf-444f27efbd8a req-6edf1171-b051-4886-af88-aa7a2412ace5 service nova] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Refreshing instance network info cache due to event network-changed-775d733a-ad5b-4b39-aa69-8b4a577c4348. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1388.194081] env[63379]: DEBUG oslo_concurrency.lockutils [req-f45c1bbe-c05f-45b4-a3cf-444f27efbd8a req-6edf1171-b051-4886-af88-aa7a2412ace5 service nova] Acquiring lock "refresh_cache-d47be684-6cd8-45c6-8c6a-9a6db0390f97" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1388.194278] env[63379]: DEBUG oslo_concurrency.lockutils [req-f45c1bbe-c05f-45b4-a3cf-444f27efbd8a req-6edf1171-b051-4886-af88-aa7a2412ace5 service nova] Acquired lock "refresh_cache-d47be684-6cd8-45c6-8c6a-9a6db0390f97" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1388.195028] env[63379]: DEBUG nova.network.neutron [req-f45c1bbe-c05f-45b4-a3cf-444f27efbd8a req-6edf1171-b051-4886-af88-aa7a2412ace5 service nova] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Refreshing network info cache for port 775d733a-ad5b-4b39-aa69-8b4a577c4348 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1388.195867] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Acquired lock "refresh_cache-30908171-e1b9-4e20-830e-419ff6d9a0fa" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1388.196046] env[63379]: DEBUG nova.network.neutron [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1388.219816] env[63379]: DEBUG nova.network.neutron [req-7f87b4af-b595-46a3-a99c-6f84961fd7a5 req-1c18e078-1fc8-4210-af49-298975243e6e service nova] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Updated VIF entry in instance network info cache for port e034314c-72fb-4187-9c6b-1cd2e95aa97a. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1388.220229] env[63379]: DEBUG nova.network.neutron [req-7f87b4af-b595-46a3-a99c-6f84961fd7a5 req-1c18e078-1fc8-4210-af49-298975243e6e service nova] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Updating instance_info_cache with network_info: [{"id": "e034314c-72fb-4187-9c6b-1cd2e95aa97a", "address": "fa:16:3e:d2:92:4e", "network": {"id": "55f3848c-4d4f-4c83-a3e6-bc7a6f7af3ce", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.250", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eb95d75934bc4912a35f709406a98a65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape034314c-72", "ovs_interfaceid": "e034314c-72fb-4187-9c6b-1cd2e95aa97a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1388.311641] env[63379]: DEBUG nova.compute.utils [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1388.315678] env[63379]: DEBUG nova.compute.manager [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1388.315854] env[63379]: DEBUG nova.network.neutron [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1388.352256] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1388.352256] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7f5ebb31-b1b1-40f4-ba9d-f3983798c0fe {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.362287] env[63379]: DEBUG oslo_vmware.api [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Waiting for the task: (returnval){ [ 1388.362287] env[63379]: value = "task-1778945" [ 1388.362287] env[63379]: _type = "Task" [ 1388.362287] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1388.362991] env[63379]: DEBUG nova.policy [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '58a15ee55d144311ab7f0a572416da0f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ea2c1f9216ee4d8e8349a27de543c2d5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1388.376330] env[63379]: DEBUG oslo_vmware.api [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Task: {'id': task-1778945, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.451851] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Releasing lock "refresh_cache-5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1388.455021] env[63379]: DEBUG nova.compute.manager [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Instance network_info: |[{"id": "a7d101c2-09da-4502-aa7a-988de81f6ee7", "address": "fa:16:3e:dd:9a:15", "network": {"id": "55f3848c-4d4f-4c83-a3e6-bc7a6f7af3ce", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.244", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eb95d75934bc4912a35f709406a98a65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7d101c2-09", "ovs_interfaceid": "a7d101c2-09da-4502-aa7a-988de81f6ee7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1388.455258] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dd:9a:15', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea00b53a-9c9b-4592-ab95-7e10473f338d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a7d101c2-09da-4502-aa7a-988de81f6ee7', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1388.462268] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Creating folder: Project (98188c6c89a74f74980b83d84e621f15). Parent ref: group-v369214. 
{{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1388.465703] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8900118c-3deb-48d4-85b0-ef98cd6cb61b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.480026] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Created folder: Project (98188c6c89a74f74980b83d84e621f15) in parent group-v369214. [ 1388.480026] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Creating folder: Instances. Parent ref: group-v369245. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1388.480026] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-94f0e97b-9021-43de-ad62-413b2b67032b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.490379] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Created folder: Instances in parent group-v369245. [ 1388.490935] env[63379]: DEBUG oslo.service.loopingcall [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1388.491296] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1388.491701] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-25287886-349c-439c-806b-aaf777963610 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.523549] env[63379]: DEBUG oslo_vmware.api [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1778943, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.525934] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1388.525934] env[63379]: value = "task-1778948" [ 1388.525934] env[63379]: _type = "Task" [ 1388.525934] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1388.534376] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778941, 'name': CreateVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.540390] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778948, 'name': CreateVM_Task} progress is 6%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.631899] env[63379]: DEBUG oslo_vmware.api [None req-c7ff48fb-4913-4494-becc-b77f512d1ef8 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Task: {'id': task-1778944, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.679214] env[63379]: DEBUG oslo_vmware.api [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e30cdc-cda8-b882-28c2-b65a7fab5195, 'name': SearchDatastore_Task, 'duration_secs': 0.038224} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1388.679214] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ed8f64d-3f63-486f-8154-f3493e2817a7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.685531] env[63379]: DEBUG oslo_vmware.api [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Waiting for the task: (returnval){ [ 1388.685531] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52502df2-d704-ffa3-e8f6-74a2addf36a9" [ 1388.685531] env[63379]: _type = "Task" [ 1388.685531] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1388.705176] env[63379]: DEBUG oslo_vmware.api [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52502df2-d704-ffa3-e8f6-74a2addf36a9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.725565] env[63379]: DEBUG oslo_concurrency.lockutils [req-7f87b4af-b595-46a3-a99c-6f84961fd7a5 req-1c18e078-1fc8-4210-af49-298975243e6e service nova] Releasing lock "refresh_cache-aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1388.725565] env[63379]: DEBUG nova.compute.manager [req-7f87b4af-b595-46a3-a99c-6f84961fd7a5 req-1c18e078-1fc8-4210-af49-298975243e6e service nova] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Received event network-vif-plugged-cb52a59c-c52f-446e-b305-8cbd08c646d1 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1388.725565] env[63379]: DEBUG oslo_concurrency.lockutils [req-7f87b4af-b595-46a3-a99c-6f84961fd7a5 req-1c18e078-1fc8-4210-af49-298975243e6e service nova] Acquiring lock "30908171-e1b9-4e20-830e-419ff6d9a0fa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1388.725565] env[63379]: DEBUG oslo_concurrency.lockutils [req-7f87b4af-b595-46a3-a99c-6f84961fd7a5 req-1c18e078-1fc8-4210-af49-298975243e6e service nova] Lock "30908171-e1b9-4e20-830e-419ff6d9a0fa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1388.725565] env[63379]: DEBUG oslo_concurrency.lockutils [req-7f87b4af-b595-46a3-a99c-6f84961fd7a5 req-1c18e078-1fc8-4210-af49-298975243e6e service nova] Lock "30908171-e1b9-4e20-830e-419ff6d9a0fa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1388.725809] env[63379]: DEBUG nova.compute.manager [req-7f87b4af-b595-46a3-a99c-6f84961fd7a5 req-1c18e078-1fc8-4210-af49-298975243e6e service nova] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] No waiting events found dispatching network-vif-plugged-cb52a59c-c52f-446e-b305-8cbd08c646d1 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1388.725809] env[63379]: WARNING nova.compute.manager [req-7f87b4af-b595-46a3-a99c-6f84961fd7a5 req-1c18e078-1fc8-4210-af49-298975243e6e service nova] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Received unexpected event network-vif-plugged-cb52a59c-c52f-446e-b305-8cbd08c646d1 for instance with vm_state building and task_state spawning. [ 1388.725809] env[63379]: DEBUG nova.compute.manager [req-7f87b4af-b595-46a3-a99c-6f84961fd7a5 req-1c18e078-1fc8-4210-af49-298975243e6e service nova] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Received event network-changed-cb52a59c-c52f-446e-b305-8cbd08c646d1 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1388.725809] env[63379]: DEBUG nova.compute.manager [req-7f87b4af-b595-46a3-a99c-6f84961fd7a5 req-1c18e078-1fc8-4210-af49-298975243e6e service nova] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Refreshing instance network info cache due to event network-changed-cb52a59c-c52f-446e-b305-8cbd08c646d1. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1388.725809] env[63379]: DEBUG oslo_concurrency.lockutils [req-7f87b4af-b595-46a3-a99c-6f84961fd7a5 req-1c18e078-1fc8-4210-af49-298975243e6e service nova] Acquiring lock "refresh_cache-30908171-e1b9-4e20-830e-419ff6d9a0fa" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1388.735599] env[63379]: DEBUG nova.compute.manager [None req-cff8885b-00c4-464d-91a5-a42d5693a8f4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1388.736082] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28dd00b0-f166-4ca6-a607-27b804c84e2b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.781693] env[63379]: DEBUG nova.network.neutron [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1388.818613] env[63379]: DEBUG nova.compute.manager [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1388.896264] env[63379]: DEBUG oslo_vmware.api [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Task: {'id': task-1778945, 'name': PowerOffVM_Task, 'duration_secs': 0.402726} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1388.899266] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1388.899492] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1388.902581] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3354f31c-171d-4191-97f8-36e542b847ad {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.911698] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1388.911969] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c406b391-e7a5-42e0-b716-99de1eac2e1b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.945413] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1388.945787] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1388.945884] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Deleting the datastore file [datastore1] a6f7c217-a493-403d-b776-870df4575f2a {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1388.949155] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-85a06e06-ed21-4778-b002-19604e3df2c9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.957030] env[63379]: DEBUG oslo_vmware.api [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Waiting for the task: (returnval){ [ 1388.957030] env[63379]: value = "task-1778950" [ 1388.957030] env[63379]: _type = "Task" [ 1388.957030] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1388.965674] env[63379]: DEBUG oslo_vmware.api [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Task: {'id': task-1778950, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.024898] env[63379]: DEBUG oslo_vmware.api [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1778943, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.893567} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.024898] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1389.025177] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32b32f10-f634-4091-b610-f32f97defd53 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.034872] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778941, 'name': CreateVM_Task, 'duration_secs': 1.395651} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.036711] env[63379]: DEBUG nova.network.neutron [req-f45c1bbe-c05f-45b4-a3cf-444f27efbd8a req-6edf1171-b051-4886-af88-aa7a2412ace5 service nova] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Updated VIF entry in instance network info cache for port 775d733a-ad5b-4b39-aa69-8b4a577c4348. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1389.036813] env[63379]: DEBUG nova.network.neutron [req-f45c1bbe-c05f-45b4-a3cf-444f27efbd8a req-6edf1171-b051-4886-af88-aa7a2412ace5 service nova] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Updating instance_info_cache with network_info: [{"id": "775d733a-ad5b-4b39-aa69-8b4a577c4348", "address": "fa:16:3e:a6:1c:0d", "network": {"id": "199703e5-fc18-408d-a14c-e309d6e3fb78", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1056046585-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "565c1cd0648646e19d73a6b1546ba28f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4d548e7-d762-406a-bb2d-dc7168a8ca67", "external-id": "nsx-vlan-transportzone-796", "segmentation_id": 796, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap775d733a-ad", "ovs_interfaceid": "775d733a-ad5b-4b39-aa69-8b4a577c4348", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1389.041647] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ae565930-1bbc-4e75-bfc1-25dbcfd2e999] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1389.041956] env[63379]: DEBUG oslo_concurrency.lockutils [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1389.042072] env[63379]: DEBUG oslo_concurrency.lockutils [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1389.042388] env[63379]: DEBUG oslo_concurrency.lockutils [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1389.043506] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5db76698-9fcd-4ac6-8f1c-21988d499ae6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.063892] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 
tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Reconfiguring VM instance instance-00000009 to attach disk [datastore1] aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae/aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1389.071971] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4538f424-7ee7-44a5-b5ab-00c739fe18a0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.087392] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778948, 'name': CreateVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.089634] env[63379]: DEBUG oslo_vmware.api [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Waiting for the task: (returnval){ [ 1389.089634] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52624064-ae03-9d43-801b-eec83b57ed8b" [ 1389.089634] env[63379]: _type = "Task" [ 1389.089634] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.097683] env[63379]: DEBUG oslo_vmware.api [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Waiting for the task: (returnval){ [ 1389.097683] env[63379]: value = "task-1778951" [ 1389.097683] env[63379]: _type = "Task" [ 1389.097683] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.102287] env[63379]: DEBUG oslo_vmware.api [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52624064-ae03-9d43-801b-eec83b57ed8b, 'name': SearchDatastore_Task, 'duration_secs': 0.016772} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.104828] env[63379]: DEBUG oslo_concurrency.lockutils [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1389.105138] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] [instance: ae565930-1bbc-4e75-bfc1-25dbcfd2e999] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1389.107017] env[63379]: DEBUG oslo_concurrency.lockutils [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1389.116930] env[63379]: DEBUG oslo_vmware.api [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1778951, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.131182] env[63379]: DEBUG oslo_vmware.api [None req-c7ff48fb-4913-4494-becc-b77f512d1ef8 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Task: {'id': task-1778944, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.760167} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.134579] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7ff48fb-4913-4494-becc-b77f512d1ef8 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1389.134828] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c7ff48fb-4913-4494-becc-b77f512d1ef8 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1389.135062] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c7ff48fb-4913-4494-becc-b77f512d1ef8 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1389.135318] env[63379]: INFO nova.compute.manager [None req-c7ff48fb-4913-4494-becc-b77f512d1ef8 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Took 2.23 seconds to destroy the instance on the hypervisor. 
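Annotation: every vCenter operation in this trace (CreateVM_Task, ReconfigVM_Task, CopyVirtualDisk_Task, DeleteDatastoreFile_Task) is driven by the same poll-until-done loop: the driver submits a Task, then repeatedly reads its progress until it reports success or failure, which is what the "progress is N%" and "completed successfully" lines reflect. The following is a minimal sketch of that pattern only; TaskInfo, fetch_task_info, and the interval/timeout values are hypothetical stand-ins, not the real oslo_vmware API.

# Minimal sketch of the poll-until-done pattern visible in the log lines above.
# TaskInfo and fetch_task_info are hypothetical stand-ins for the vSphere task
# objects that the driver polls; they are not the real oslo_vmware interfaces.
import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    state: str            # "running", "success" or "error"
    progress: int         # 0-100
    error: str | None = None

def wait_for_vcenter_task(fetch_task_info, poll_interval=0.5, timeout=300.0):
    """Poll a task until it finishes, mirroring the _poll_task log lines."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_task_info()
        if info.state == "success":
            return info                      # "... completed successfully."
        if info.state == "error":
            raise RuntimeError(info.error)   # surfaced as a task failure
        print(f"progress is {info.progress}%")   # "... progress is 51%." style lines
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete in time")

In the real driver the poll interval and retry behaviour come from configuration; the numbers above are placeholders.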
[ 1389.135594] env[63379]: DEBUG oslo.service.loopingcall [None req-c7ff48fb-4913-4494-becc-b77f512d1ef8 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1389.136038] env[63379]: DEBUG nova.compute.manager [-] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1389.136119] env[63379]: DEBUG nova.network.neutron [-] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1389.198093] env[63379]: DEBUG oslo_vmware.api [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52502df2-d704-ffa3-e8f6-74a2addf36a9, 'name': SearchDatastore_Task, 'duration_secs': 0.055637} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.202067] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1389.202067] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] d47be684-6cd8-45c6-8c6a-9a6db0390f97/d47be684-6cd8-45c6-8c6a-9a6db0390f97.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1389.202067] env[63379]: DEBUG oslo_concurrency.lockutils [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1389.202067] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1389.202812] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-55a69f56-8ab4-40eb-99e9-984d4b7a39fc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.204649] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-893e70ac-c915-4189-a0ba-340446e4836d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.216016] env[63379]: DEBUG oslo_vmware.api [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Waiting for the task: (returnval){ [ 1389.216016] env[63379]: value = "task-1778952" [ 1389.216016] env[63379]: _type = "Task" [ 1389.216016] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.217614] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1389.217614] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1389.223440] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c037c29c-ba13-46e7-8bd4-e3e6e1780632 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.234580] env[63379]: DEBUG oslo_vmware.api [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Task: {'id': task-1778952, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.235743] env[63379]: DEBUG oslo_vmware.api [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Waiting for the task: (returnval){ [ 1389.235743] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]528b3c8d-cd4c-a31d-ce1f-1f0622a7299d" [ 1389.235743] env[63379]: _type = "Task" [ 1389.235743] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.250232] env[63379]: DEBUG oslo_vmware.api [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]528b3c8d-cd4c-a31d-ce1f-1f0622a7299d, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.250494] env[63379]: DEBUG oslo_concurrency.lockutils [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Acquiring lock "41952d7b-ce23-4e9b-8843-bbac1d3099c1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1389.250757] env[63379]: DEBUG oslo_concurrency.lockutils [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Lock "41952d7b-ce23-4e9b-8843-bbac1d3099c1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1389.251500] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd5c12a2-d4bb-48d7-bb99-b71f4c871024 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.257196] env[63379]: INFO nova.compute.manager [None req-cff8885b-00c4-464d-91a5-a42d5693a8f4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] instance snapshotting [ 1389.257406] env[63379]: WARNING nova.compute.manager [None req-cff8885b-00c4-464d-91a5-a42d5693a8f4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] trying to snapshot a non-running instance: (state: 4 expected: 1) [ 1389.263018] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37d49542-64e6-4118-9395-b01836ef8fb3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.264627] env[63379]: DEBUG oslo_vmware.api [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Waiting for the task: (returnval){ [ 1389.264627] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f54162-3078-f391-574b-5339fada8de3" [ 1389.264627] env[63379]: _type = "Task" [ 1389.264627] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.283185] env[63379]: DEBUG nova.network.neutron [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Successfully created port: 1913f18a-c402-444f-bfec-50a3ab88167d {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1389.289514] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07af48e4-d290-4a71-88ff-140f419f5d70 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.297248] env[63379]: DEBUG oslo_vmware.api [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f54162-3078-f391-574b-5339fada8de3, 'name': SearchDatastore_Task, 'duration_secs': 0.012775} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.298475] env[63379]: DEBUG oslo_concurrency.lockutils [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1389.298475] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] ae565930-1bbc-4e75-bfc1-25dbcfd2e999/ae565930-1bbc-4e75-bfc1-25dbcfd2e999.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1389.298896] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cd5cde1f-5a32-42b2-a898-2760ca8fb9c7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.311162] env[63379]: DEBUG oslo_vmware.api [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Waiting for the task: (returnval){ [ 1389.311162] env[63379]: value = "task-1778953" [ 1389.311162] env[63379]: _type = "Task" [ 1389.311162] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.326841] env[63379]: DEBUG oslo_vmware.api [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Task: {'id': task-1778953, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.330397] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64bee0cb-e3d8-442b-adc3-2f0292c1d900 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.337482] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c8134b0-726d-4ec2-ad0a-bfc69205fd9a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.375443] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-735c4e6d-7d5f-425e-aeb2-5f903fd8b385 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.385544] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbd0f635-5d89-4717-b5f0-d38e3643de0c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.390033] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0dfd20b4-6999-49b9-b231-d7c89dc8ea24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Acquiring lock "c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1389.390292] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0dfd20b4-6999-49b9-b231-d7c89dc8ea24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Lock "c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1389.390492] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0dfd20b4-6999-49b9-b231-d7c89dc8ea24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Acquiring lock "c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1389.390665] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0dfd20b4-6999-49b9-b231-d7c89dc8ea24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Lock "c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1389.390828] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0dfd20b4-6999-49b9-b231-d7c89dc8ea24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Lock "c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1389.404156] env[63379]: DEBUG nova.compute.provider_tree [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1389.405774] env[63379]: INFO nova.compute.manager [None req-0dfd20b4-6999-49b9-b231-d7c89dc8ea24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] Terminating instance [ 1389.407894] env[63379]: DEBUG nova.compute.manager [None req-0dfd20b4-6999-49b9-b231-d7c89dc8ea24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1389.408103] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0dfd20b4-6999-49b9-b231-d7c89dc8ea24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1389.408912] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cbb3ac2-3b73-4aac-a0f5-f9e366ed9b52 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.418436] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dfd20b4-6999-49b9-b231-d7c89dc8ea24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1389.418705] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dd631c49-2b3f-40ea-a0d1-38505c86007f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.427633] env[63379]: DEBUG oslo_vmware.api [None req-0dfd20b4-6999-49b9-b231-d7c89dc8ea24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Waiting for the task: (returnval){ [ 1389.427633] env[63379]: value = "task-1778954" [ 1389.427633] env[63379]: _type = "Task" [ 1389.427633] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.438897] env[63379]: DEBUG oslo_vmware.api [None req-0dfd20b4-6999-49b9-b231-d7c89dc8ea24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1778954, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.469153] env[63379]: DEBUG oslo_vmware.api [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Task: {'id': task-1778950, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.211699} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.469637] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1389.469734] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1389.469831] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1389.545621] env[63379]: DEBUG oslo_concurrency.lockutils [req-f45c1bbe-c05f-45b4-a3cf-444f27efbd8a req-6edf1171-b051-4886-af88-aa7a2412ace5 service nova] Releasing lock "refresh_cache-d47be684-6cd8-45c6-8c6a-9a6db0390f97" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1389.546124] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778948, 'name': CreateVM_Task, 'duration_secs': 0.610229} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.546384] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1389.546974] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1389.547166] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1389.547508] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1389.547771] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c6ce8134-66b2-4be2-80b0-829d715ec6b7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.554336] env[63379]: DEBUG oslo_vmware.api [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Waiting for the task: (returnval){ [ 1389.554336] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b9f505-c4f2-efc2-cd1f-bb405a293623" [ 1389.554336] env[63379]: _type = "Task" [ 1389.554336] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.567027] env[63379]: DEBUG oslo_vmware.api [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b9f505-c4f2-efc2-cd1f-bb405a293623, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.613107] env[63379]: DEBUG oslo_vmware.api [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1778951, 'name': ReconfigVM_Task, 'duration_secs': 0.42103} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.613391] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Reconfigured VM instance instance-00000009 to attach disk [datastore1] aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae/aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1389.614299] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7e163022-6f68-4cbe-b5f8-88d6a33ed331 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.622869] env[63379]: DEBUG oslo_vmware.api [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Waiting for the task: (returnval){ [ 1389.622869] env[63379]: value = "task-1778955" [ 1389.622869] env[63379]: _type = "Task" [ 1389.622869] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.635830] env[63379]: DEBUG oslo_vmware.api [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1778955, 'name': Rename_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.729796] env[63379]: DEBUG oslo_vmware.api [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Task: {'id': task-1778952, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.807036] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cff8885b-00c4-464d-91a5-a42d5693a8f4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Creating Snapshot of the VM instance {{(pid=63379) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1389.810054] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-38af6b36-c2b6-4245-ac3b-633c8259a9fc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.817490] env[63379]: DEBUG oslo_vmware.api [None req-cff8885b-00c4-464d-91a5-a42d5693a8f4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1389.817490] env[63379]: value = "task-1778956" [ 1389.817490] env[63379]: _type = "Task" [ 1389.817490] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.825690] env[63379]: DEBUG oslo_vmware.api [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Task: {'id': task-1778953, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.828878] env[63379]: DEBUG nova.compute.manager [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1389.835640] env[63379]: DEBUG oslo_vmware.api [None req-cff8885b-00c4-464d-91a5-a42d5693a8f4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1778956, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.863704] env[63379]: DEBUG nova.virt.hardware [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:28:26Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='1169877476',id=22,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-384164557',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1389.863999] env[63379]: DEBUG nova.virt.hardware [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1389.864210] env[63379]: DEBUG nova.virt.hardware [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1389.864435] env[63379]: DEBUG nova.virt.hardware [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1389.864549] env[63379]: DEBUG nova.virt.hardware [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1389.864753] env[63379]: DEBUG nova.virt.hardware [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Chose sockets=0, cores=0, threads=0; 
limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1389.864929] env[63379]: DEBUG nova.virt.hardware [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1389.865235] env[63379]: DEBUG nova.virt.hardware [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1389.865517] env[63379]: DEBUG nova.virt.hardware [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1389.865677] env[63379]: DEBUG nova.virt.hardware [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1389.865839] env[63379]: DEBUG nova.virt.hardware [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1389.866835] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0350bef-01c3-451b-95a2-fea21233f8db {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.879199] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-223de038-722a-4de4-b811-1be94253cea9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.908526] env[63379]: DEBUG nova.scheduler.client.report [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1389.941054] env[63379]: DEBUG oslo_vmware.api [None req-0dfd20b4-6999-49b9-b231-d7c89dc8ea24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': 
task-1778954, 'name': PowerOffVM_Task, 'duration_secs': 0.333846} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.941346] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dfd20b4-6999-49b9-b231-d7c89dc8ea24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1389.941540] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0dfd20b4-6999-49b9-b231-d7c89dc8ea24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1389.941904] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bb5556b0-87d1-42db-b80a-3062cb736337 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.999339] env[63379]: DEBUG nova.network.neutron [-] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1390.065586] env[63379]: DEBUG oslo_vmware.api [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b9f505-c4f2-efc2-cd1f-bb405a293623, 'name': SearchDatastore_Task, 'duration_secs': 0.067102} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.065951] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1390.066388] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1390.066530] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1390.067167] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1390.067167] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1390.067167] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-17bc343a-fa19-44b2-bce0-04b91a402f2e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.099017] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1390.099017] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1390.099017] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2205124-4e82-41d4-8377-91d6ed7c786c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.104036] env[63379]: DEBUG oslo_vmware.api [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Waiting for the task: (returnval){ [ 1390.104036] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e72a71-4020-174b-6ea6-683a3b9daaad" [ 1390.104036] env[63379]: _type = "Task" [ 1390.104036] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.117935] env[63379]: DEBUG oslo_vmware.api [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e72a71-4020-174b-6ea6-683a3b9daaad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.137686] env[63379]: DEBUG oslo_vmware.api [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1778955, 'name': Rename_Task, 'duration_secs': 0.274539} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.139027] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1390.139027] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0a4c4166-b532-4839-a871-53a3e298eb8e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.153082] env[63379]: DEBUG oslo_vmware.api [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Waiting for the task: (returnval){ [ 1390.153082] env[63379]: value = "task-1778958" [ 1390.153082] env[63379]: _type = "Task" [ 1390.153082] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.168110] env[63379]: DEBUG oslo_vmware.api [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1778958, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.229949] env[63379]: DEBUG oslo_vmware.api [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Task: {'id': task-1778952, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.562668} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.231626] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] d47be684-6cd8-45c6-8c6a-9a6db0390f97/d47be684-6cd8-45c6-8c6a-9a6db0390f97.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1390.231626] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1390.231626] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0de3e0db-04e5-41ae-81da-2d6493c47609 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.240898] env[63379]: DEBUG oslo_vmware.api [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Waiting for the task: (returnval){ [ 1390.240898] env[63379]: value = "task-1778959" [ 1390.240898] env[63379]: _type = "Task" [ 1390.240898] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.245827] env[63379]: DEBUG nova.network.neutron [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Updating instance_info_cache with network_info: [{"id": "b26a8dba-cd30-4320-901e-8e9a8584ea6f", "address": "fa:16:3e:e8:9e:1c", "network": {"id": "f649ba18-eb99-4af4-b4c8-f441759040ba", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-124911587", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.53", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2519cafe6c84b12b560995b2d3dd84d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19671de9-8b5b-4710-adc3-7419f3c0f171", "external-id": "nsx-vlan-transportzone-421", "segmentation_id": 421, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb26a8dba-cd", "ovs_interfaceid": "b26a8dba-cd30-4320-901e-8e9a8584ea6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "cb52a59c-c52f-446e-b305-8cbd08c646d1", "address": "fa:16:3e:69:33:b5", "network": {"id": "e8790bfb-d523-4cc1-bcef-85c8c3bda31e", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1868676702", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": 
"gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.161", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a2519cafe6c84b12b560995b2d3dd84d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ffc811e4-0e4e-4748-8c7e-b3f14ccbd42d", "external-id": "nsx-vlan-transportzone-404", "segmentation_id": 404, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb52a59c-c5", "ovs_interfaceid": "cb52a59c-c52f-446e-b305-8cbd08c646d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "c856b8f8-3490-43b2-b2c2-b96a5c3e550e", "address": "fa:16:3e:fa:27:a4", "network": {"id": "f649ba18-eb99-4af4-b4c8-f441759040ba", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-124911587", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.250", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2519cafe6c84b12b560995b2d3dd84d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19671de9-8b5b-4710-adc3-7419f3c0f171", "external-id": "nsx-vlan-transportzone-421", "segmentation_id": 421, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc856b8f8-34", "ovs_interfaceid": "c856b8f8-3490-43b2-b2c2-b96a5c3e550e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1390.256260] env[63379]: DEBUG oslo_vmware.api [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Task: {'id': task-1778959, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.272924] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Acquiring lock "8a7a3a54-ca4f-4860-a976-7d6b1212b9c9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1390.272924] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Lock "8a7a3a54-ca4f-4860-a976-7d6b1212b9c9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1390.328972] env[63379]: DEBUG oslo_vmware.api [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Task: {'id': task-1778953, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.337604] env[63379]: DEBUG oslo_vmware.api [None req-cff8885b-00c4-464d-91a5-a42d5693a8f4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1778956, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.415599] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.615s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1390.415599] env[63379]: DEBUG nova.compute.manager [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1390.417899] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.777s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1390.426808] env[63379]: INFO nova.compute.claims [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1390.437202] env[63379]: DEBUG nova.compute.manager [req-b958a6ac-40e6-4d9a-94a7-d20dca3d8220 req-1ac71521-2896-4d13-ab1d-bf444e72d675 service nova] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Received event network-vif-plugged-c856b8f8-3490-43b2-b2c2-b96a5c3e550e {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1390.437808] env[63379]: DEBUG oslo_concurrency.lockutils [req-b958a6ac-40e6-4d9a-94a7-d20dca3d8220 req-1ac71521-2896-4d13-ab1d-bf444e72d675 service nova] Acquiring lock "30908171-e1b9-4e20-830e-419ff6d9a0fa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1390.437808] env[63379]: DEBUG oslo_concurrency.lockutils [req-b958a6ac-40e6-4d9a-94a7-d20dca3d8220 req-1ac71521-2896-4d13-ab1d-bf444e72d675 service nova] Lock "30908171-e1b9-4e20-830e-419ff6d9a0fa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1390.437996] env[63379]: DEBUG oslo_concurrency.lockutils [req-b958a6ac-40e6-4d9a-94a7-d20dca3d8220 req-1ac71521-2896-4d13-ab1d-bf444e72d675 service nova] Lock "30908171-e1b9-4e20-830e-419ff6d9a0fa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1390.438160] env[63379]: DEBUG nova.compute.manager [req-b958a6ac-40e6-4d9a-94a7-d20dca3d8220 req-1ac71521-2896-4d13-ab1d-bf444e72d675 service nova] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] No waiting events found dispatching network-vif-plugged-c856b8f8-3490-43b2-b2c2-b96a5c3e550e {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1390.438310] env[63379]: WARNING nova.compute.manager [req-b958a6ac-40e6-4d9a-94a7-d20dca3d8220 req-1ac71521-2896-4d13-ab1d-bf444e72d675 service nova] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Received unexpected event network-vif-plugged-c856b8f8-3490-43b2-b2c2-b96a5c3e550e for instance with vm_state building and task_state spawning. 
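Annotation: the network-vif-plugged traffic above follows a simple dispatch pattern: Neutron notifies the compute manager, which takes a short per-instance "-events" lock, checks whether anything is waiting on that event, and logs a warning when no waiter is registered (the "Received unexpected event ... for instance with vm_state building" lines). A rough sketch of that dispatch is below, with plain threading standing in for oslo's lock utilities; the class and method names are illustrative, not Nova's implementation.

# Rough sketch of the event dispatch seen above: a per-instance lock guards a
# table of waiters keyed by event name; events with no registered waiter are
# reported as "unexpected".  Plain threading stands in for oslo_concurrency.
import threading
from collections import defaultdict

class InstanceEvents:
    def __init__(self):
        self._lock = threading.Lock()             # the "<uuid>-events" lock
        self._waiters = defaultdict(dict)         # instance uuid -> {event: Event}

    def prepare(self, instance_uuid, event_name):
        """Register interest in an event before starting the work that emits it."""
        waiter = threading.Event()
        with self._lock:
            self._waiters[instance_uuid][event_name] = waiter
        return waiter

    def pop_event(self, instance_uuid, event_name):
        """Called when Neutron reports e.g. network-vif-plugged-<port>."""
        with self._lock:
            waiter = self._waiters[instance_uuid].pop(event_name, None)
        if waiter is None:
            # Matches the WARNING "Received unexpected event ..." lines
            print(f"unexpected event {event_name} for {instance_uuid}")
            return False
        waiter.set()
        return True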
[ 1390.438545] env[63379]: DEBUG nova.compute.manager [req-b958a6ac-40e6-4d9a-94a7-d20dca3d8220 req-1ac71521-2896-4d13-ab1d-bf444e72d675 service nova] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Received event network-changed-c856b8f8-3490-43b2-b2c2-b96a5c3e550e {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1390.438705] env[63379]: DEBUG nova.compute.manager [req-b958a6ac-40e6-4d9a-94a7-d20dca3d8220 req-1ac71521-2896-4d13-ab1d-bf444e72d675 service nova] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Refreshing instance network info cache due to event network-changed-c856b8f8-3490-43b2-b2c2-b96a5c3e550e. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1390.439048] env[63379]: DEBUG oslo_concurrency.lockutils [req-b958a6ac-40e6-4d9a-94a7-d20dca3d8220 req-1ac71521-2896-4d13-ab1d-bf444e72d675 service nova] Acquiring lock "refresh_cache-30908171-e1b9-4e20-830e-419ff6d9a0fa" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1390.469889] env[63379]: DEBUG nova.compute.manager [req-ad8d103b-b556-4005-9ddc-61d84d353e28 req-9113aa60-93bf-4d05-a4fd-8d37f46615f9 service nova] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Received event network-vif-plugged-a7d101c2-09da-4502-aa7a-988de81f6ee7 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1390.470921] env[63379]: DEBUG oslo_concurrency.lockutils [req-ad8d103b-b556-4005-9ddc-61d84d353e28 req-9113aa60-93bf-4d05-a4fd-8d37f46615f9 service nova] Acquiring lock "5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1390.470921] env[63379]: DEBUG oslo_concurrency.lockutils [req-ad8d103b-b556-4005-9ddc-61d84d353e28 req-9113aa60-93bf-4d05-a4fd-8d37f46615f9 service nova] Lock "5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1390.470921] env[63379]: DEBUG oslo_concurrency.lockutils [req-ad8d103b-b556-4005-9ddc-61d84d353e28 req-9113aa60-93bf-4d05-a4fd-8d37f46615f9 service nova] Lock "5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1390.471525] env[63379]: DEBUG nova.compute.manager [req-ad8d103b-b556-4005-9ddc-61d84d353e28 req-9113aa60-93bf-4d05-a4fd-8d37f46615f9 service nova] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] No waiting events found dispatching network-vif-plugged-a7d101c2-09da-4502-aa7a-988de81f6ee7 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1390.472036] env[63379]: WARNING nova.compute.manager [req-ad8d103b-b556-4005-9ddc-61d84d353e28 req-9113aa60-93bf-4d05-a4fd-8d37f46615f9 service nova] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Received unexpected event network-vif-plugged-a7d101c2-09da-4502-aa7a-988de81f6ee7 for instance with vm_state building and task_state spawning. 
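Editor's note: the entries above show the external-event flow for port c856b8f8-…: a network-vif-plugged event arrives, no spawner is waiting for it (hence the WARNING about an unexpected event), and the follow-up network-changed event triggers a network info cache refresh. A schematic sketch of that dispatch, using hypothetical names and plain threading primitives standing in for Nova's internals:

    import threading

    _waiters = {}  # (instance_uuid, event_name) -> threading.Event

    def register_wait(instance_uuid, event_name):
        # The spawning thread registers interest before plugging the VIF.
        ev = threading.Event()
        _waiters[(instance_uuid, event_name)] = ev
        return ev

    def external_instance_event(instance_uuid, event_name, refresh_cache):
        waiter = _waiters.pop((instance_uuid, event_name), None)
        if waiter is not None:
            waiter.set()  # dispatch to the waiting build thread
        else:
            print(f'unexpected event {event_name} for {instance_uuid}')
        if event_name.startswith('network-changed'):
            refresh_cache(instance_uuid)  # refresh the instance network info cache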
[ 1390.473682] env[63379]: DEBUG nova.compute.manager [req-ad8d103b-b556-4005-9ddc-61d84d353e28 req-9113aa60-93bf-4d05-a4fd-8d37f46615f9 service nova] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Received event network-changed-a7d101c2-09da-4502-aa7a-988de81f6ee7 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1390.473682] env[63379]: DEBUG nova.compute.manager [req-ad8d103b-b556-4005-9ddc-61d84d353e28 req-9113aa60-93bf-4d05-a4fd-8d37f46615f9 service nova] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Refreshing instance network info cache due to event network-changed-a7d101c2-09da-4502-aa7a-988de81f6ee7. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1390.473682] env[63379]: DEBUG oslo_concurrency.lockutils [req-ad8d103b-b556-4005-9ddc-61d84d353e28 req-9113aa60-93bf-4d05-a4fd-8d37f46615f9 service nova] Acquiring lock "refresh_cache-5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1390.473682] env[63379]: DEBUG oslo_concurrency.lockutils [req-ad8d103b-b556-4005-9ddc-61d84d353e28 req-9113aa60-93bf-4d05-a4fd-8d37f46615f9 service nova] Acquired lock "refresh_cache-5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1390.473682] env[63379]: DEBUG nova.network.neutron [req-ad8d103b-b556-4005-9ddc-61d84d353e28 req-9113aa60-93bf-4d05-a4fd-8d37f46615f9 service nova] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Refreshing network info cache for port a7d101c2-09da-4502-aa7a-988de81f6ee7 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1390.506615] env[63379]: INFO nova.compute.manager [-] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Took 1.37 seconds to deallocate network for instance. 
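Editor's note: the recurring "Invoking <object>.<method> with opID=…" and "Task: {…} progress is N%" entries come from oslo.vmware's session layer polling vSphere tasks. An illustrative sketch of that usage, assuming a reachable vCenter and placeholder credentials (it mirrors the public oslo.vmware API, not Nova's wrapper code):

    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    # Look up some VM managed-object reference (placeholder selection logic).
    result = session.invoke_api(vim_util, 'get_objects', session.vim,
                                'VirtualMachine', 100)
    vm_ref = result.objects[0].obj

    # vSphere methods ending in _Task return a task object; wait_for_task
    # polls it, producing the "progress is N%" / "completed successfully" lines.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)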
[ 1390.513134] env[63379]: DEBUG nova.virt.hardware [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1390.513530] env[63379]: DEBUG nova.virt.hardware [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1390.513813] env[63379]: DEBUG nova.virt.hardware [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1390.514142] env[63379]: DEBUG nova.virt.hardware [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1390.514320] env[63379]: DEBUG nova.virt.hardware [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1390.514547] env[63379]: DEBUG nova.virt.hardware [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1390.515724] env[63379]: DEBUG nova.virt.hardware [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1390.516026] env[63379]: DEBUG nova.virt.hardware [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1390.516284] env[63379]: DEBUG nova.virt.hardware [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 
tempest-ServersAdmin275Test-1762012693-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1390.516514] env[63379]: DEBUG nova.virt.hardware [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1390.516746] env[63379]: DEBUG nova.virt.hardware [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1390.519341] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7050b277-d2e3-4767-a0a7-ce533288aa1e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.536506] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9fb90b5-dac6-46e8-ae4e-3bd00e964d5c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.553299] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Instance VIF info [] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1390.560992] env[63379]: DEBUG oslo.service.loopingcall [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1390.561412] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1390.561690] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7f80eafb-b308-4d87-95d2-53d5f9a6b38a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.581628] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1390.581628] env[63379]: value = "task-1778960" [ 1390.581628] env[63379]: _type = "Task" [ 1390.581628] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.591046] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778960, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.620024] env[63379]: DEBUG oslo_vmware.api [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e72a71-4020-174b-6ea6-683a3b9daaad, 'name': SearchDatastore_Task, 'duration_secs': 0.063281} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.620024] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0faef1c9-4db1-47d9-ade3-1547b752db9c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.629763] env[63379]: DEBUG oslo_vmware.api [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Waiting for the task: (returnval){ [ 1390.629763] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]522c7e9f-6ca4-c056-b46a-8850733ed2ca" [ 1390.629763] env[63379]: _type = "Task" [ 1390.629763] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.640255] env[63379]: DEBUG oslo_vmware.api [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]522c7e9f-6ca4-c056-b46a-8850733ed2ca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.664670] env[63379]: DEBUG oslo_vmware.api [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1778958, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.749077] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Releasing lock "refresh_cache-30908171-e1b9-4e20-830e-419ff6d9a0fa" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1390.749709] env[63379]: DEBUG nova.compute.manager [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Instance network_info: |[{"id": "b26a8dba-cd30-4320-901e-8e9a8584ea6f", "address": "fa:16:3e:e8:9e:1c", "network": {"id": "f649ba18-eb99-4af4-b4c8-f441759040ba", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-124911587", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.53", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2519cafe6c84b12b560995b2d3dd84d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19671de9-8b5b-4710-adc3-7419f3c0f171", "external-id": "nsx-vlan-transportzone-421", "segmentation_id": 421, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb26a8dba-cd", "ovs_interfaceid": "b26a8dba-cd30-4320-901e-8e9a8584ea6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": 
"cb52a59c-c52f-446e-b305-8cbd08c646d1", "address": "fa:16:3e:69:33:b5", "network": {"id": "e8790bfb-d523-4cc1-bcef-85c8c3bda31e", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1868676702", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.161", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a2519cafe6c84b12b560995b2d3dd84d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ffc811e4-0e4e-4748-8c7e-b3f14ccbd42d", "external-id": "nsx-vlan-transportzone-404", "segmentation_id": 404, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb52a59c-c5", "ovs_interfaceid": "cb52a59c-c52f-446e-b305-8cbd08c646d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "c856b8f8-3490-43b2-b2c2-b96a5c3e550e", "address": "fa:16:3e:fa:27:a4", "network": {"id": "f649ba18-eb99-4af4-b4c8-f441759040ba", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-124911587", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.250", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2519cafe6c84b12b560995b2d3dd84d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19671de9-8b5b-4710-adc3-7419f3c0f171", "external-id": "nsx-vlan-transportzone-421", "segmentation_id": 421, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc856b8f8-34", "ovs_interfaceid": "c856b8f8-3490-43b2-b2c2-b96a5c3e550e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1390.750571] env[63379]: DEBUG oslo_concurrency.lockutils [req-7f87b4af-b595-46a3-a99c-6f84961fd7a5 req-1c18e078-1fc8-4210-af49-298975243e6e service nova] Acquired lock "refresh_cache-30908171-e1b9-4e20-830e-419ff6d9a0fa" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1390.750894] env[63379]: DEBUG nova.network.neutron [req-7f87b4af-b595-46a3-a99c-6f84961fd7a5 req-1c18e078-1fc8-4210-af49-298975243e6e service nova] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Refreshing network info cache for port cb52a59c-c52f-446e-b305-8cbd08c646d1 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1390.753119] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e8:9e:1c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '19671de9-8b5b-4710-adc3-7419f3c0f171', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b26a8dba-cd30-4320-901e-8e9a8584ea6f', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:69:33:b5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ffc811e4-0e4e-4748-8c7e-b3f14ccbd42d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cb52a59c-c52f-446e-b305-8cbd08c646d1', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:fa:27:a4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '19671de9-8b5b-4710-adc3-7419f3c0f171', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c856b8f8-3490-43b2-b2c2-b96a5c3e550e', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1390.770248] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Creating folder: Project (a2519cafe6c84b12b560995b2d3dd84d). Parent ref: group-v369214. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1390.778622] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-38a2542a-4638-48e6-b158-ef1449d55732 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.781218] env[63379]: DEBUG oslo_vmware.api [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Task: {'id': task-1778959, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.201627} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.781489] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1390.783030] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd395fba-3bf9-4156-8dea-7751d645a017 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.812653] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Reconfiguring VM instance instance-0000000a to attach disk [datastore1] d47be684-6cd8-45c6-8c6a-9a6db0390f97/d47be684-6cd8-45c6-8c6a-9a6db0390f97.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1390.814718] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6b55b45a-bb57-45d7-9e53-f0fd8cbf1355 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.831052] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Created folder: Project 
(a2519cafe6c84b12b560995b2d3dd84d) in parent group-v369214. [ 1390.831238] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Creating folder: Instances. Parent ref: group-v369249. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1390.837359] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9d7d5fc3-b47b-4232-ac5c-97f5690ce14f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.855665] env[63379]: DEBUG oslo_vmware.api [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Waiting for the task: (returnval){ [ 1390.855665] env[63379]: value = "task-1778962" [ 1390.855665] env[63379]: _type = "Task" [ 1390.855665] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.857757] env[63379]: DEBUG oslo_vmware.api [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Task: {'id': task-1778953, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.258875} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.864259] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] ae565930-1bbc-4e75-bfc1-25dbcfd2e999/ae565930-1bbc-4e75-bfc1-25dbcfd2e999.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1390.864523] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] [instance: ae565930-1bbc-4e75-bfc1-25dbcfd2e999] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1390.864805] env[63379]: DEBUG oslo_vmware.api [None req-cff8885b-00c4-464d-91a5-a42d5693a8f4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1778956, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.864952] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-05c0eed9-57f6-4f3a-910b-8d68cba18cca {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.873753] env[63379]: DEBUG oslo_vmware.api [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Task: {'id': task-1778962, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.875438] env[63379]: DEBUG oslo_vmware.api [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Waiting for the task: (returnval){ [ 1390.875438] env[63379]: value = "task-1778964" [ 1390.875438] env[63379]: _type = "Task" [ 1390.875438] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.879982] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Created folder: Instances in parent group-v369249. [ 1390.880207] env[63379]: DEBUG oslo.service.loopingcall [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1390.880914] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1390.881036] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e3dea8fb-5e2d-4da1-b8f5-1e86908b633d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.906107] env[63379]: DEBUG oslo_vmware.api [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Task: {'id': task-1778964, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.913444] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1390.913444] env[63379]: value = "task-1778965" [ 1390.913444] env[63379]: _type = "Task" [ 1390.913444] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.923115] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778965, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.928331] env[63379]: DEBUG nova.compute.utils [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1390.934683] env[63379]: DEBUG nova.compute.manager [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1390.934683] env[63379]: DEBUG nova.network.neutron [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1391.006056] env[63379]: DEBUG nova.policy [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6330e918269b488bb1634cbbe759a875', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e8ea1d9b2b194236ac9e91082b291b97', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1391.031313] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c7ff48fb-4913-4494-becc-b77f512d1ef8 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1391.102837] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778960, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.144939] env[63379]: DEBUG oslo_vmware.api [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]522c7e9f-6ca4-c056-b46a-8850733ed2ca, 'name': SearchDatastore_Task, 'duration_secs': 0.129286} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.148098] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1391.148098] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd/5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1391.148098] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3b00badc-6393-47c0-af0d-63a7bd41c041 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.160420] env[63379]: DEBUG oslo_vmware.api [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Waiting for the task: (returnval){ [ 1391.160420] env[63379]: value = "task-1778966" [ 1391.160420] env[63379]: _type = "Task" [ 1391.160420] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.172806] env[63379]: DEBUG oslo_vmware.api [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1778958, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.178958] env[63379]: DEBUG oslo_vmware.api [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Task: {'id': task-1778966, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.254281] env[63379]: DEBUG nova.network.neutron [req-7f87b4af-b595-46a3-a99c-6f84961fd7a5 req-1c18e078-1fc8-4210-af49-298975243e6e service nova] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Updated VIF entry in instance network info cache for port cb52a59c-c52f-446e-b305-8cbd08c646d1. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1391.254281] env[63379]: DEBUG nova.network.neutron [req-7f87b4af-b595-46a3-a99c-6f84961fd7a5 req-1c18e078-1fc8-4210-af49-298975243e6e service nova] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Updating instance_info_cache with network_info: [{"id": "b26a8dba-cd30-4320-901e-8e9a8584ea6f", "address": "fa:16:3e:e8:9e:1c", "network": {"id": "f649ba18-eb99-4af4-b4c8-f441759040ba", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-124911587", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.53", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2519cafe6c84b12b560995b2d3dd84d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19671de9-8b5b-4710-adc3-7419f3c0f171", "external-id": "nsx-vlan-transportzone-421", "segmentation_id": 421, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb26a8dba-cd", "ovs_interfaceid": "b26a8dba-cd30-4320-901e-8e9a8584ea6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "cb52a59c-c52f-446e-b305-8cbd08c646d1", "address": "fa:16:3e:69:33:b5", "network": {"id": "e8790bfb-d523-4cc1-bcef-85c8c3bda31e", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1868676702", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.161", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a2519cafe6c84b12b560995b2d3dd84d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ffc811e4-0e4e-4748-8c7e-b3f14ccbd42d", "external-id": "nsx-vlan-transportzone-404", "segmentation_id": 404, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb52a59c-c5", "ovs_interfaceid": "cb52a59c-c52f-446e-b305-8cbd08c646d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "c856b8f8-3490-43b2-b2c2-b96a5c3e550e", "address": "fa:16:3e:fa:27:a4", "network": {"id": "f649ba18-eb99-4af4-b4c8-f441759040ba", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-124911587", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.250", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2519cafe6c84b12b560995b2d3dd84d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19671de9-8b5b-4710-adc3-7419f3c0f171", "external-id": "nsx-vlan-transportzone-421", 
"segmentation_id": 421, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc856b8f8-34", "ovs_interfaceid": "c856b8f8-3490-43b2-b2c2-b96a5c3e550e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1391.357353] env[63379]: DEBUG oslo_vmware.api [None req-cff8885b-00c4-464d-91a5-a42d5693a8f4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1778956, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.372488] env[63379]: DEBUG oslo_vmware.api [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Task: {'id': task-1778962, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.387199] env[63379]: DEBUG oslo_vmware.api [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Task: {'id': task-1778964, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.426726] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778965, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.433066] env[63379]: DEBUG nova.compute.manager [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Start building block device mappings for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1391.442938] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0dfd20b4-6999-49b9-b231-d7c89dc8ea24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1391.443221] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0dfd20b4-6999-49b9-b231-d7c89dc8ea24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1391.443418] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-0dfd20b4-6999-49b9-b231-d7c89dc8ea24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Deleting the datastore file [datastore1] c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1391.443681] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-747fe069-398b-40d3-b9eb-d06607d6905f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.452632] env[63379]: DEBUG oslo_vmware.api [None req-0dfd20b4-6999-49b9-b231-d7c89dc8ea24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Waiting for the task: (returnval){ [ 1391.452632] env[63379]: value = "task-1778967" [ 1391.452632] env[63379]: _type = "Task" [ 1391.452632] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.471587] env[63379]: DEBUG oslo_vmware.api [None req-0dfd20b4-6999-49b9-b231-d7c89dc8ea24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1778967, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.509961] env[63379]: DEBUG nova.network.neutron [req-ad8d103b-b556-4005-9ddc-61d84d353e28 req-9113aa60-93bf-4d05-a4fd-8d37f46615f9 service nova] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Updated VIF entry in instance network info cache for port a7d101c2-09da-4502-aa7a-988de81f6ee7. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1391.510389] env[63379]: DEBUG nova.network.neutron [req-ad8d103b-b556-4005-9ddc-61d84d353e28 req-9113aa60-93bf-4d05-a4fd-8d37f46615f9 service nova] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Updating instance_info_cache with network_info: [{"id": "a7d101c2-09da-4502-aa7a-988de81f6ee7", "address": "fa:16:3e:dd:9a:15", "network": {"id": "55f3848c-4d4f-4c83-a3e6-bc7a6f7af3ce", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.244", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eb95d75934bc4912a35f709406a98a65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7d101c2-09", "ovs_interfaceid": "a7d101c2-09da-4502-aa7a-988de81f6ee7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1391.603892] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778960, 'name': CreateVM_Task, 'duration_secs': 0.822735} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.603892] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1391.604193] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1391.604901] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1391.604901] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1391.605046] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6039d596-5b58-4f21-819f-3a097543e002 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.610632] 
env[63379]: DEBUG oslo_vmware.api [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Waiting for the task: (returnval){ [ 1391.610632] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]525448b0-506c-c342-e99b-ccf0c44167db" [ 1391.610632] env[63379]: _type = "Task" [ 1391.610632] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.624169] env[63379]: DEBUG oslo_vmware.api [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]525448b0-506c-c342-e99b-ccf0c44167db, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.667166] env[63379]: DEBUG oslo_vmware.api [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1778958, 'name': PowerOnVM_Task, 'duration_secs': 1.252308} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.674339] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1391.674339] env[63379]: INFO nova.compute.manager [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Took 12.41 seconds to spawn the instance on the hypervisor. [ 1391.674339] env[63379]: DEBUG nova.compute.manager [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1391.675438] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a87b5b03-a524-4271-9973-f825bca35a05 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.684388] env[63379]: DEBUG oslo_vmware.api [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Task: {'id': task-1778966, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.760063] env[63379]: DEBUG oslo_concurrency.lockutils [req-7f87b4af-b595-46a3-a99c-6f84961fd7a5 req-1c18e078-1fc8-4210-af49-298975243e6e service nova] Releasing lock "refresh_cache-30908171-e1b9-4e20-830e-419ff6d9a0fa" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1391.760063] env[63379]: DEBUG oslo_concurrency.lockutils [req-b958a6ac-40e6-4d9a-94a7-d20dca3d8220 req-1ac71521-2896-4d13-ab1d-bf444e72d675 service nova] Acquired lock "refresh_cache-30908171-e1b9-4e20-830e-419ff6d9a0fa" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1391.760063] env[63379]: DEBUG nova.network.neutron [req-b958a6ac-40e6-4d9a-94a7-d20dca3d8220 req-1ac71521-2896-4d13-ab1d-bf444e72d675 service nova] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Refreshing network info cache for port c856b8f8-3490-43b2-b2c2-b96a5c3e550e {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1391.761908] env[63379]: DEBUG nova.network.neutron [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Successfully created port: 65e3bc3b-bfed-4dd6-be59-87481a211014 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1391.828151] env[63379]: DEBUG nova.network.neutron [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Successfully updated port: 1913f18a-c402-444f-bfec-50a3ab88167d {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1391.854495] env[63379]: DEBUG oslo_vmware.api [None req-cff8885b-00c4-464d-91a5-a42d5693a8f4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1778956, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.872276] env[63379]: DEBUG oslo_vmware.api [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Task: {'id': task-1778962, 'name': ReconfigVM_Task, 'duration_secs': 0.579861} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.872584] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Reconfigured VM instance instance-0000000a to attach disk [datastore1] d47be684-6cd8-45c6-8c6a-9a6db0390f97/d47be684-6cd8-45c6-8c6a-9a6db0390f97.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1391.873543] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-145a4988-2070-4f0d-8c89-e71f8300ae65 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.887138] env[63379]: DEBUG oslo_vmware.api [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Waiting for the task: (returnval){ [ 1391.887138] env[63379]: value = "task-1778968" [ 1391.887138] env[63379]: _type = "Task" [ 1391.887138] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.895981] env[63379]: DEBUG oslo_vmware.api [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Task: {'id': task-1778964, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.800425} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.895981] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] [instance: ae565930-1bbc-4e75-bfc1-25dbcfd2e999] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1391.895981] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f44e3fa-45c2-45ba-adde-ee12140c7198 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.904737] env[63379]: DEBUG oslo_vmware.api [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Task: {'id': task-1778968, 'name': Rename_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.925924] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] [instance: ae565930-1bbc-4e75-bfc1-25dbcfd2e999] Reconfiguring VM instance instance-0000000c to attach disk [datastore1] ae565930-1bbc-4e75-bfc1-25dbcfd2e999/ae565930-1bbc-4e75-bfc1-25dbcfd2e999.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1391.931898] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2ef9d5ff-5e22-4b07-8ff7-d10b40bcfcd8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.963367] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778965, 'name': CreateVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.966682] env[63379]: DEBUG oslo_vmware.api [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Waiting for the task: (returnval){ [ 1391.966682] env[63379]: value = "task-1778969" [ 1391.966682] env[63379]: _type = "Task" [ 1391.966682] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.980427] env[63379]: DEBUG oslo_vmware.api [None req-0dfd20b4-6999-49b9-b231-d7c89dc8ea24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1778967, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.984577] env[63379]: DEBUG oslo_vmware.api [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Task: {'id': task-1778969, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.994273] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05d39180-3fd5-4134-bbe8-54c3c8cb5b70 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.008065] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0348b8b-a334-41df-8eca-84481415d88f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.015110] env[63379]: DEBUG oslo_concurrency.lockutils [req-ad8d103b-b556-4005-9ddc-61d84d353e28 req-9113aa60-93bf-4d05-a4fd-8d37f46615f9 service nova] Releasing lock "refresh_cache-5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1392.043869] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c51c486f-b115-41a1-a407-da57126d896c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.052231] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47ef4466-9049-4a61-b667-75cb9662dcca {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.069394] env[63379]: DEBUG nova.compute.provider_tree [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1392.131369] env[63379]: DEBUG oslo_vmware.api [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]525448b0-506c-c342-e99b-ccf0c44167db, 'name': SearchDatastore_Task, 'duration_secs': 0.031786} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.131369] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1392.131568] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1392.131780] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1392.131962] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1392.132171] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1392.132460] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ed76bd0e-5298-4a1b-8aaa-087012aa075c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.146671] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1392.146887] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1392.147719] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5a257e7-c313-4780-b13b-ff1d682e4329 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.155198] env[63379]: DEBUG oslo_vmware.api [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Waiting for the task: (returnval){ [ 1392.155198] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52bbccc2-9347-3da1-3a4d-02dd92b9fad2" [ 1392.155198] env[63379]: _type = "Task" [ 1392.155198] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.166663] env[63379]: DEBUG oslo_vmware.api [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52bbccc2-9347-3da1-3a4d-02dd92b9fad2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.181949] env[63379]: DEBUG oslo_vmware.api [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Task: {'id': task-1778966, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.887886} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.183583] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd/5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1392.184020] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1392.184232] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8594476e-a07e-4707-b040-94141ab2493d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.200260] env[63379]: DEBUG oslo_vmware.api [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Waiting for the task: (returnval){ [ 1392.200260] env[63379]: value = "task-1778970" [ 1392.200260] env[63379]: _type = "Task" [ 1392.200260] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.206752] env[63379]: INFO nova.compute.manager [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Took 26.05 seconds to build instance. [ 1392.215775] env[63379]: DEBUG oslo_vmware.api [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Task: {'id': task-1778970, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.331474] env[63379]: DEBUG oslo_concurrency.lockutils [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Acquiring lock "refresh_cache-bf0dd3cf-684c-4378-a89c-5b9f16df062d" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1392.331687] env[63379]: DEBUG oslo_concurrency.lockutils [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Acquired lock "refresh_cache-bf0dd3cf-684c-4378-a89c-5b9f16df062d" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1392.331917] env[63379]: DEBUG nova.network.neutron [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1392.352097] env[63379]: DEBUG oslo_vmware.api [None req-cff8885b-00c4-464d-91a5-a42d5693a8f4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1778956, 'name': CreateSnapshot_Task, 'duration_secs': 2.245592} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.352384] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cff8885b-00c4-464d-91a5-a42d5693a8f4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Created Snapshot of the VM instance {{(pid=63379) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1392.353211] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b34d74e-490e-43e0-8418-13c042fa4ec2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.400803] env[63379]: DEBUG oslo_vmware.api [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Task: {'id': task-1778968, 'name': Rename_Task, 'duration_secs': 0.337691} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.400803] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1392.401087] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-47592e4f-da80-4de5-90cf-5b736b492c6b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.412959] env[63379]: DEBUG oslo_vmware.api [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Waiting for the task: (returnval){ [ 1392.412959] env[63379]: value = "task-1778971" [ 1392.412959] env[63379]: _type = "Task" [ 1392.412959] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.427415] env[63379]: DEBUG oslo_vmware.api [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Task: {'id': task-1778971, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.431686] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778965, 'name': CreateVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.460491] env[63379]: DEBUG nova.compute.manager [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1392.469101] env[63379]: DEBUG oslo_vmware.api [None req-0dfd20b4-6999-49b9-b231-d7c89dc8ea24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1778967, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.700709} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.469194] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-0dfd20b4-6999-49b9-b231-d7c89dc8ea24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1392.469367] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0dfd20b4-6999-49b9-b231-d7c89dc8ea24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1392.469831] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0dfd20b4-6999-49b9-b231-d7c89dc8ea24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1392.469960] env[63379]: INFO nova.compute.manager [None req-0dfd20b4-6999-49b9-b231-d7c89dc8ea24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] Took 3.06 seconds to destroy the instance on the hypervisor. [ 1392.470178] env[63379]: DEBUG oslo.service.loopingcall [None req-0dfd20b4-6999-49b9-b231-d7c89dc8ea24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1392.474254] env[63379]: DEBUG nova.compute.manager [-] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1392.474254] env[63379]: DEBUG nova.network.neutron [-] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1392.482537] env[63379]: DEBUG oslo_vmware.api [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Task: {'id': task-1778969, 'name': ReconfigVM_Task, 'duration_secs': 0.416753} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.484556] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] [instance: ae565930-1bbc-4e75-bfc1-25dbcfd2e999] Reconfigured VM instance instance-0000000c to attach disk [datastore1] ae565930-1bbc-4e75-bfc1-25dbcfd2e999/ae565930-1bbc-4e75-bfc1-25dbcfd2e999.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1392.485974] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2ad8b2ad-7b2a-4cda-b2bc-a8ed051bc09e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.494719] env[63379]: DEBUG nova.virt.hardware [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1392.494719] env[63379]: DEBUG nova.virt.hardware [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1392.494719] env[63379]: DEBUG nova.virt.hardware [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1392.494931] env[63379]: DEBUG nova.virt.hardware [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1392.495142] env[63379]: DEBUG nova.virt.hardware [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1392.495322] env[63379]: DEBUG nova.virt.hardware [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 
tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1392.495423] env[63379]: DEBUG nova.virt.hardware [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1392.495582] env[63379]: DEBUG nova.virt.hardware [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1392.495766] env[63379]: DEBUG nova.virt.hardware [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1392.495926] env[63379]: DEBUG nova.virt.hardware [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1392.496110] env[63379]: DEBUG nova.virt.hardware [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1392.497080] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79709cd0-8bdf-4ab1-be19-f352e32db57c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.504087] env[63379]: DEBUG oslo_vmware.api [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Waiting for the task: (returnval){ [ 1392.504087] env[63379]: value = "task-1778972" [ 1392.504087] env[63379]: _type = "Task" [ 1392.504087] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.516014] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18e8989b-574f-43c8-bfed-30f45c10ffc4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.525826] env[63379]: DEBUG oslo_vmware.api [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Task: {'id': task-1778972, 'name': Rename_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.605423] env[63379]: ERROR nova.scheduler.client.report [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] [req-8a837e44-fa35-4215-9efe-28cefacd40de] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID cf478c89-515f-4372-b90f-4868ab56e978. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-8a837e44-fa35-4215-9efe-28cefacd40de"}]} [ 1392.623816] env[63379]: DEBUG nova.scheduler.client.report [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Refreshing inventories for resource provider cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1392.643434] env[63379]: DEBUG nova.scheduler.client.report [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Updating ProviderTree inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1392.643645] env[63379]: DEBUG nova.compute.provider_tree [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1392.664756] env[63379]: DEBUG nova.scheduler.client.report [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Refreshing aggregate associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, aggregates: None {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1392.675521] env[63379]: DEBUG oslo_vmware.api [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 
tempest-ServersAdmin275Test-1762012693-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52bbccc2-9347-3da1-3a4d-02dd92b9fad2, 'name': SearchDatastore_Task, 'duration_secs': 0.010903} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.676257] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b731587e-c779-4cb8-b211-d4013ce2a37f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.682999] env[63379]: DEBUG oslo_vmware.api [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Waiting for the task: (returnval){ [ 1392.682999] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52abef6c-37b4-be00-4477-aecdaf0b8864" [ 1392.682999] env[63379]: _type = "Task" [ 1392.682999] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.687673] env[63379]: DEBUG nova.scheduler.client.report [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Refreshing trait associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1392.697929] env[63379]: DEBUG oslo_vmware.api [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52abef6c-37b4-be00-4477-aecdaf0b8864, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.713570] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b6eaa923-6d94-49d7-9fe3-9475fe23f0a8 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Lock "aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.569s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1392.713899] env[63379]: DEBUG oslo_vmware.api [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Task: {'id': task-1778970, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082093} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.715292] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1392.719343] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1708d773-9a23-4205-aaf2-f8659ba21742 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.752356] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Reconfiguring VM instance instance-0000000b to attach disk [datastore1] 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd/5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1392.755950] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-486e6c46-6556-4902-b762-cb7206a9bf27 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.782027] env[63379]: DEBUG oslo_vmware.api [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Waiting for the task: (returnval){ [ 1392.782027] env[63379]: value = "task-1778973" [ 1392.782027] env[63379]: _type = "Task" [ 1392.782027] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.797560] env[63379]: DEBUG oslo_vmware.api [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Task: {'id': task-1778973, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.890255] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cff8885b-00c4-464d-91a5-a42d5693a8f4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Creating linked-clone VM from snapshot {{(pid=63379) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1392.893167] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-60167923-550a-4f4e-a045-203556fe8322 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.903077] env[63379]: DEBUG oslo_vmware.api [None req-cff8885b-00c4-464d-91a5-a42d5693a8f4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1392.903077] env[63379]: value = "task-1778974" [ 1392.903077] env[63379]: _type = "Task" [ 1392.903077] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.916975] env[63379]: DEBUG oslo_vmware.api [None req-cff8885b-00c4-464d-91a5-a42d5693a8f4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1778974, 'name': CloneVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.931981] env[63379]: DEBUG oslo_vmware.api [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Task: {'id': task-1778971, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.932078] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778965, 'name': CreateVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.015529] env[63379]: DEBUG oslo_vmware.api [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Task: {'id': task-1778972, 'name': Rename_Task, 'duration_secs': 0.150727} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.016804] env[63379]: DEBUG nova.network.neutron [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1393.018672] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] [instance: ae565930-1bbc-4e75-bfc1-25dbcfd2e999] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1393.021843] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bf8a7d56-b513-403a-8a6a-4e521dcab272 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.030678] env[63379]: DEBUG oslo_vmware.api [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Waiting for the task: (returnval){ [ 1393.030678] env[63379]: value = "task-1778975" [ 1393.030678] env[63379]: _type = "Task" [ 1393.030678] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.041558] env[63379]: DEBUG oslo_vmware.api [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Task: {'id': task-1778975, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.157939] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d38bffef-11d1-45d8-83b2-ae86ac14e927 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.167406] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-317af725-eedf-48c3-8e30-d3a861a3b758 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.212056] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e088ec0a-6cc7-4031-b604-a9f8920c1302 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.216763] env[63379]: DEBUG nova.network.neutron [req-b958a6ac-40e6-4d9a-94a7-d20dca3d8220 req-1ac71521-2896-4d13-ab1d-bf444e72d675 service nova] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Updated VIF entry in instance network info cache for port c856b8f8-3490-43b2-b2c2-b96a5c3e550e. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1393.217178] env[63379]: DEBUG nova.network.neutron [req-b958a6ac-40e6-4d9a-94a7-d20dca3d8220 req-1ac71521-2896-4d13-ab1d-bf444e72d675 service nova] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Updating instance_info_cache with network_info: [{"id": "b26a8dba-cd30-4320-901e-8e9a8584ea6f", "address": "fa:16:3e:e8:9e:1c", "network": {"id": "f649ba18-eb99-4af4-b4c8-f441759040ba", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-124911587", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.53", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2519cafe6c84b12b560995b2d3dd84d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19671de9-8b5b-4710-adc3-7419f3c0f171", "external-id": "nsx-vlan-transportzone-421", "segmentation_id": 421, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb26a8dba-cd", "ovs_interfaceid": "b26a8dba-cd30-4320-901e-8e9a8584ea6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "cb52a59c-c52f-446e-b305-8cbd08c646d1", "address": "fa:16:3e:69:33:b5", "network": {"id": "e8790bfb-d523-4cc1-bcef-85c8c3bda31e", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1868676702", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.161", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a2519cafe6c84b12b560995b2d3dd84d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ffc811e4-0e4e-4748-8c7e-b3f14ccbd42d", 
"external-id": "nsx-vlan-transportzone-404", "segmentation_id": 404, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb52a59c-c5", "ovs_interfaceid": "cb52a59c-c52f-446e-b305-8cbd08c646d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "c856b8f8-3490-43b2-b2c2-b96a5c3e550e", "address": "fa:16:3e:fa:27:a4", "network": {"id": "f649ba18-eb99-4af4-b4c8-f441759040ba", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-124911587", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.250", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2519cafe6c84b12b560995b2d3dd84d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19671de9-8b5b-4710-adc3-7419f3c0f171", "external-id": "nsx-vlan-transportzone-421", "segmentation_id": 421, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc856b8f8-34", "ovs_interfaceid": "c856b8f8-3490-43b2-b2c2-b96a5c3e550e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1393.218729] env[63379]: DEBUG nova.compute.manager [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1393.229229] env[63379]: DEBUG oslo_vmware.api [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52abef6c-37b4-be00-4477-aecdaf0b8864, 'name': SearchDatastore_Task, 'duration_secs': 0.020014} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.232890] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1393.233620] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] a6f7c217-a493-403d-b776-870df4575f2a/a6f7c217-a493-403d-b776-870df4575f2a.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1393.234977] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deb23e86-f868-4beb-8392-b5dc06684549 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.240314] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9833d06a-cf41-404b-9dca-a1a6922a3d70 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.256185] env[63379]: DEBUG nova.compute.provider_tree [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1393.259126] env[63379]: DEBUG oslo_vmware.api [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Waiting for the task: (returnval){ [ 1393.259126] env[63379]: value = "task-1778976" [ 1393.259126] env[63379]: _type = "Task" [ 1393.259126] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.273239] env[63379]: DEBUG oslo_vmware.api [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Task: {'id': task-1778976, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.292257] env[63379]: DEBUG oslo_vmware.api [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Task: {'id': task-1778973, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.415788] env[63379]: DEBUG oslo_vmware.api [None req-cff8885b-00c4-464d-91a5-a42d5693a8f4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1778974, 'name': CloneVM_Task} progress is 94%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.427469] env[63379]: DEBUG nova.network.neutron [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Updating instance_info_cache with network_info: [{"id": "1913f18a-c402-444f-bfec-50a3ab88167d", "address": "fa:16:3e:17:52:16", "network": {"id": "b8a1048f-18ff-4dd7-a19e-5d58874f3f5d", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1410192054-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea2c1f9216ee4d8e8349a27de543c2d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaba65c3-6925-4c7f-83b6-17cd1a328e27", "external-id": "nsx-vlan-transportzone-202", "segmentation_id": 202, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1913f18a-c4", "ovs_interfaceid": "1913f18a-c402-444f-bfec-50a3ab88167d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1393.442991] env[63379]: DEBUG oslo_vmware.api [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Task: {'id': task-1778971, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.443239] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778965, 'name': CreateVM_Task, 'duration_secs': 2.035248} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.443400] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1393.444325] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1393.444503] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1393.444789] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1393.445058] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eadab6f9-0f5a-4f4a-9f55-868f1b30979e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.452327] env[63379]: DEBUG oslo_vmware.api [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Waiting for the task: (returnval){ [ 1393.452327] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]521c5319-c66f-6681-cf07-766d95cb07fb" [ 1393.452327] env[63379]: _type = "Task" [ 1393.452327] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.462996] env[63379]: DEBUG oslo_vmware.api [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]521c5319-c66f-6681-cf07-766d95cb07fb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.543480] env[63379]: DEBUG oslo_vmware.api [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Task: {'id': task-1778975, 'name': PowerOnVM_Task, 'duration_secs': 0.491982} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.543748] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] [instance: ae565930-1bbc-4e75-bfc1-25dbcfd2e999] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1393.543979] env[63379]: INFO nova.compute.manager [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] [instance: ae565930-1bbc-4e75-bfc1-25dbcfd2e999] Took 6.19 seconds to spawn the instance on the hypervisor. [ 1393.544219] env[63379]: DEBUG nova.compute.manager [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] [instance: ae565930-1bbc-4e75-bfc1-25dbcfd2e999] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1393.545042] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b2b1c6f-8f88-46da-91a9-17ab08d3e996 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.727903] env[63379]: DEBUG oslo_concurrency.lockutils [req-b958a6ac-40e6-4d9a-94a7-d20dca3d8220 req-1ac71521-2896-4d13-ab1d-bf444e72d675 service nova] Releasing lock "refresh_cache-30908171-e1b9-4e20-830e-419ff6d9a0fa" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1393.758961] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1393.785570] env[63379]: DEBUG oslo_vmware.api [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Task: {'id': task-1778976, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.803770] env[63379]: DEBUG oslo_vmware.api [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Task: {'id': task-1778973, 'name': ReconfigVM_Task, 'duration_secs': 0.732814} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.805203] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Reconfigured VM instance instance-0000000b to attach disk [datastore1] 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd/5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1393.806989] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f4c5358e-92e1-4a5d-87f5-dd284e9fcc7c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.822739] env[63379]: DEBUG oslo_vmware.api [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Waiting for the task: (returnval){ [ 1393.822739] env[63379]: value = "task-1778977" [ 1393.822739] env[63379]: _type = "Task" [ 1393.822739] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.828023] env[63379]: DEBUG nova.compute.manager [req-d0fbf119-2c9c-4e68-8b7f-ff31d3f5eff5 req-59cc5e30-401b-402b-ada9-af0163889767 service nova] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Received event network-vif-deleted-47acb26e-647c-4d9a-bcfd-7c9ea5cf9846 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1393.835347] env[63379]: DEBUG nova.scheduler.client.report [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Updated inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 with generation 28 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1393.835673] env[63379]: DEBUG nova.compute.provider_tree [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Updating resource provider cf478c89-515f-4372-b90f-4868ab56e978 generation from 28 to 29 during operation: update_inventory {{(pid=63379) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1393.835859] env[63379]: DEBUG nova.compute.provider_tree [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 
163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1393.851171] env[63379]: DEBUG oslo_vmware.api [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Task: {'id': task-1778977, 'name': Rename_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.852831] env[63379]: DEBUG nova.network.neutron [-] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1393.861578] env[63379]: DEBUG nova.compute.manager [req-c01b6428-b92a-40c4-a2d7-a47c54202c23 req-7c24a7d7-56f1-4099-8bc5-e6b530dba549 service nova] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Received event network-vif-plugged-1913f18a-c402-444f-bfec-50a3ab88167d {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1393.861578] env[63379]: DEBUG oslo_concurrency.lockutils [req-c01b6428-b92a-40c4-a2d7-a47c54202c23 req-7c24a7d7-56f1-4099-8bc5-e6b530dba549 service nova] Acquiring lock "bf0dd3cf-684c-4378-a89c-5b9f16df062d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1393.861578] env[63379]: DEBUG oslo_concurrency.lockutils [req-c01b6428-b92a-40c4-a2d7-a47c54202c23 req-7c24a7d7-56f1-4099-8bc5-e6b530dba549 service nova] Lock "bf0dd3cf-684c-4378-a89c-5b9f16df062d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1393.861578] env[63379]: DEBUG oslo_concurrency.lockutils [req-c01b6428-b92a-40c4-a2d7-a47c54202c23 req-7c24a7d7-56f1-4099-8bc5-e6b530dba549 service nova] Lock "bf0dd3cf-684c-4378-a89c-5b9f16df062d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1393.861756] env[63379]: DEBUG nova.compute.manager [req-c01b6428-b92a-40c4-a2d7-a47c54202c23 req-7c24a7d7-56f1-4099-8bc5-e6b530dba549 service nova] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] No waiting events found dispatching network-vif-plugged-1913f18a-c402-444f-bfec-50a3ab88167d {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1393.861908] env[63379]: WARNING nova.compute.manager [req-c01b6428-b92a-40c4-a2d7-a47c54202c23 req-7c24a7d7-56f1-4099-8bc5-e6b530dba549 service nova] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Received unexpected event network-vif-plugged-1913f18a-c402-444f-bfec-50a3ab88167d for instance with vm_state building and task_state spawning. 
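The inventory payload reported to Placement in the entries above (VCPU total 48 with allocation_ratio 4.0, MEMORY_MB total 196590 with 512 reserved, DISK_GB total 400) determines how much capacity the scheduler will treat as allocatable. A minimal sketch of that arithmetic, assuming only the standard Placement capacity formula (total - reserved) * allocation_ratio; this is illustrative, not Nova's or Placement's code:

    # Illustrative only: schedulable capacity implied by the inventory
    # payload logged above, using (total - reserved) * allocation_ratio.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def schedulable(inv):
        """Return {resource_class: capacity Placement will allocate against}."""
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inv.items()}

    print(schedulable(inventory))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}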
[ 1393.863866] env[63379]: DEBUG nova.compute.manager [req-c01b6428-b92a-40c4-a2d7-a47c54202c23 req-7c24a7d7-56f1-4099-8bc5-e6b530dba549 service nova] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Received event network-changed-1913f18a-c402-444f-bfec-50a3ab88167d {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1393.863866] env[63379]: DEBUG nova.compute.manager [req-c01b6428-b92a-40c4-a2d7-a47c54202c23 req-7c24a7d7-56f1-4099-8bc5-e6b530dba549 service nova] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Refreshing instance network info cache due to event network-changed-1913f18a-c402-444f-bfec-50a3ab88167d. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1393.863866] env[63379]: DEBUG oslo_concurrency.lockutils [req-c01b6428-b92a-40c4-a2d7-a47c54202c23 req-7c24a7d7-56f1-4099-8bc5-e6b530dba549 service nova] Acquiring lock "refresh_cache-bf0dd3cf-684c-4378-a89c-5b9f16df062d" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1393.919250] env[63379]: DEBUG oslo_vmware.api [None req-cff8885b-00c4-464d-91a5-a42d5693a8f4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1778974, 'name': CloneVM_Task} progress is 94%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.933056] env[63379]: DEBUG oslo_concurrency.lockutils [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Releasing lock "refresh_cache-bf0dd3cf-684c-4378-a89c-5b9f16df062d" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1393.933056] env[63379]: DEBUG nova.compute.manager [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Instance network_info: |[{"id": "1913f18a-c402-444f-bfec-50a3ab88167d", "address": "fa:16:3e:17:52:16", "network": {"id": "b8a1048f-18ff-4dd7-a19e-5d58874f3f5d", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1410192054-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea2c1f9216ee4d8e8349a27de543c2d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaba65c3-6925-4c7f-83b6-17cd1a328e27", "external-id": "nsx-vlan-transportzone-202", "segmentation_id": 202, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1913f18a-c4", "ovs_interfaceid": "1913f18a-c402-444f-bfec-50a3ab88167d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1393.933444] env[63379]: DEBUG oslo_vmware.api [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 
tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Task: {'id': task-1778971, 'name': PowerOnVM_Task, 'duration_secs': 1.224477} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.933645] env[63379]: DEBUG oslo_concurrency.lockutils [req-c01b6428-b92a-40c4-a2d7-a47c54202c23 req-7c24a7d7-56f1-4099-8bc5-e6b530dba549 service nova] Acquired lock "refresh_cache-bf0dd3cf-684c-4378-a89c-5b9f16df062d" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1393.933836] env[63379]: DEBUG nova.network.neutron [req-c01b6428-b92a-40c4-a2d7-a47c54202c23 req-7c24a7d7-56f1-4099-8bc5-e6b530dba549 service nova] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Refreshing network info cache for port 1913f18a-c402-444f-bfec-50a3ab88167d {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1393.935152] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:52:16', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eaba65c3-6925-4c7f-83b6-17cd1a328e27', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1913f18a-c402-444f-bfec-50a3ab88167d', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1393.945991] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Creating folder: Project (ea2c1f9216ee4d8e8349a27de543c2d5). Parent ref: group-v369214. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1393.945991] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1393.945991] env[63379]: INFO nova.compute.manager [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Took 12.20 seconds to spawn the instance on the hypervisor. 
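The "Instance VIF info" entry above is derived from the Neutron network_info printed a few entries earlier (port id, MAC, bridge, and the nsx-logical-switch-id in the port details). A rough sketch of that mapping for one VIF, with field names taken from the log itself; the helper is hypothetical and stands in for whatever the vmwareapi layer actually does:

    # Illustrative sketch: flatten one Neutron VIF dict (as printed in the
    # network_info above) into the "VIF info" shape logged before the VM is
    # built. Field names come from the log; the function itself is made up.
    def vif_info_from_network_info(vif, vif_model='vmxnet3'):
        details = vif.get('details', {})
        return {
            'network_name': vif['network']['bridge'],   # e.g. 'br-int'
            'mac_address': vif['address'],              # e.g. 'fa:16:3e:17:52:16'
            'network_ref': {
                'type': 'OpaqueNetwork',
                'network-id': details.get('nsx-logical-switch-id'),
                'network-type': 'nsx.LogicalSwitch',
                'use-external-id': True,
            },
            'iface_id': vif['id'],                      # Neutron port UUID
            'vif_model': vif_model,
        }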
[ 1393.945991] env[63379]: DEBUG nova.compute.manager [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1393.946334] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cb1138e3-ac0f-4c85-812e-ada8b1388e3e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.949359] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0771916-aa5a-4f3b-ae0e-2633e4efda1c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.969040] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Created folder: Project (ea2c1f9216ee4d8e8349a27de543c2d5) in parent group-v369214. [ 1393.969040] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Creating folder: Instances. Parent ref: group-v369254. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1393.970020] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-560a1e8d-3d95-4095-8e60-edbeba04f5af {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.975729] env[63379]: DEBUG oslo_vmware.api [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]521c5319-c66f-6681-cf07-766d95cb07fb, 'name': SearchDatastore_Task, 'duration_secs': 0.021406} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.976780] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1393.976780] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1393.976949] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1393.977563] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1393.977563] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1393.977563] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-630aeb08-f822-4f5c-880b-e62714eede8e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.993052] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Created folder: Instances in parent group-v369254. [ 1393.993052] env[63379]: DEBUG oslo.service.loopingcall [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1393.993052] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1393.993052] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9d89b7fa-f105-4295-9ce0-901ef0b76b1c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.008804] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1394.009056] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1394.010252] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b804461-f3c0-4554-88f9-26339b181145 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.018398] env[63379]: DEBUG oslo_vmware.api [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Waiting for the task: (returnval){ [ 1394.018398] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52714895-c901-e477-12de-eca1852be1f3" [ 1394.018398] env[63379]: _type = "Task" [ 1394.018398] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.020522] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1394.020522] env[63379]: value = "task-1778980" [ 1394.020522] env[63379]: _type = "Task" [ 1394.020522] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.037046] env[63379]: DEBUG oslo_vmware.api [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52714895-c901-e477-12de-eca1852be1f3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.037786] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778980, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.062806] env[63379]: INFO nova.compute.manager [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] [instance: ae565930-1bbc-4e75-bfc1-25dbcfd2e999] Took 12.78 seconds to build instance. 
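Most of the surrounding entries follow the same shape: submit a vCenter task (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, ...), then poll it, logging "progress is N%" until it reports "completed successfully". A generic sketch of that poll loop under stated assumptions; get_task_info is a stand-in callable, not a real oslo.vmware API, and the real driver drives this with a looping call rather than sleep:

    # Illustrative poll-until-done loop for the "progress is N% ...
    # completed successfully" pattern seen in the log.
    import time

    def wait_for_task(get_task_info, task_ref, interval=0.5, timeout=300):
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info(task_ref)   # expected keys: state, progress, error
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                raise RuntimeError(f"Task {task_ref} failed: {info.get('error')}")
            print(f"Task {task_ref} progress is {info.get('progress', 0)}%")
            time.sleep(interval)
        raise TimeoutError(f"Task {task_ref} did not complete within {timeout}s")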
[ 1394.276604] env[63379]: DEBUG oslo_vmware.api [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Task: {'id': task-1778976, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.798593} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.276852] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] a6f7c217-a493-403d-b776-870df4575f2a/a6f7c217-a493-403d-b776-870df4575f2a.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1394.277294] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1394.277410] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5ddf40f2-e3c5-46d4-95f1-d3efcc17c839 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.285995] env[63379]: DEBUG oslo_vmware.api [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Waiting for the task: (returnval){ [ 1394.285995] env[63379]: value = "task-1778981" [ 1394.285995] env[63379]: _type = "Task" [ 1394.285995] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.297014] env[63379]: DEBUG oslo_vmware.api [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Task: {'id': task-1778981, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.340090] env[63379]: DEBUG oslo_vmware.api [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Task: {'id': task-1778977, 'name': Rename_Task, 'duration_secs': 0.315185} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.340516] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1394.340810] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6015f847-acbd-465a-bd1d-7ebc6b1881d7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.343587] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.926s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1394.344131] env[63379]: DEBUG nova.compute.manager [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1394.347458] env[63379]: DEBUG oslo_concurrency.lockutils [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.616s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1394.350454] env[63379]: INFO nova.compute.claims [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1394.355806] env[63379]: INFO nova.compute.manager [-] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] Took 1.88 seconds to deallocate network for instance. [ 1394.356220] env[63379]: DEBUG oslo_vmware.api [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Waiting for the task: (returnval){ [ 1394.356220] env[63379]: value = "task-1778982" [ 1394.356220] env[63379]: _type = "Task" [ 1394.356220] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.380084] env[63379]: DEBUG oslo_vmware.api [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Task: {'id': task-1778982, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.382949] env[63379]: DEBUG nova.network.neutron [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Successfully updated port: 65e3bc3b-bfed-4dd6-be59-87481a211014 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1394.421792] env[63379]: DEBUG oslo_vmware.api [None req-cff8885b-00c4-464d-91a5-a42d5693a8f4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1778974, 'name': CloneVM_Task} progress is 94%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.476475] env[63379]: INFO nova.compute.manager [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Took 23.54 seconds to build instance. [ 1394.540474] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778980, 'name': CreateVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.540802] env[63379]: DEBUG oslo_vmware.api [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52714895-c901-e477-12de-eca1852be1f3, 'name': SearchDatastore_Task, 'duration_secs': 0.019138} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.541627] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b747487c-6488-47fa-8bc8-239e9712ce87 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.548381] env[63379]: DEBUG oslo_vmware.api [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Waiting for the task: (returnval){ [ 1394.548381] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5213453f-e427-707b-ddb1-698db2e35d94" [ 1394.548381] env[63379]: _type = "Task" [ 1394.548381] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.558015] env[63379]: DEBUG oslo_vmware.api [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5213453f-e427-707b-ddb1-698db2e35d94, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.565541] env[63379]: DEBUG oslo_concurrency.lockutils [None req-166eac9c-74d7-4a2f-8734-3af23407662e tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Lock "ae565930-1bbc-4e75-bfc1-25dbcfd2e999" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.752s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1394.800262] env[63379]: DEBUG oslo_vmware.api [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Task: {'id': task-1778981, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074033} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.800262] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1394.800943] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc682dd6-d145-4aa4-992a-5e19d85c9d5c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.828661] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Reconfiguring VM instance instance-00000006 to attach disk [datastore1] a6f7c217-a493-403d-b776-870df4575f2a/a6f7c217-a493-403d-b776-870df4575f2a.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1394.831949] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-18ecdbff-d3b8-4194-b6be-98559e4a9c68 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.850989] env[63379]: DEBUG nova.compute.utils [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1394.852703] env[63379]: DEBUG nova.compute.manager [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1394.852867] env[63379]: DEBUG nova.network.neutron [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1394.867251] env[63379]: DEBUG oslo_vmware.api [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Waiting for the task: (returnval){ [ 1394.867251] env[63379]: value = "task-1778983" [ 1394.867251] env[63379]: _type = "Task" [ 1394.867251] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.876247] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0dfd20b4-6999-49b9-b231-d7c89dc8ea24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1394.876548] env[63379]: DEBUG oslo_vmware.api [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Task: {'id': task-1778982, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.888212] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Acquiring lock "refresh_cache-25090d85-cd10-44fc-aa9d-071ada14f249" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1394.888429] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Acquired lock "refresh_cache-25090d85-cd10-44fc-aa9d-071ada14f249" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1394.889066] env[63379]: DEBUG nova.network.neutron [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1394.892036] env[63379]: DEBUG oslo_vmware.api [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Task: {'id': task-1778983, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.919506] env[63379]: DEBUG oslo_vmware.api [None req-cff8885b-00c4-464d-91a5-a42d5693a8f4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1778974, 'name': CloneVM_Task, 'duration_secs': 1.897663} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.920261] env[63379]: INFO nova.virt.vmwareapi.vmops [None req-cff8885b-00c4-464d-91a5-a42d5693a8f4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Created linked-clone VM from snapshot [ 1394.921279] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53f24d7a-68ed-4612-91fc-897da91f0f4c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.935088] env[63379]: DEBUG nova.virt.vmwareapi.images [None req-cff8885b-00c4-464d-91a5-a42d5693a8f4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Uploading image b4d4e2bd-9da6-4e0b-9f28-188f22313c1d {{(pid=63379) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1394.964539] env[63379]: DEBUG oslo_vmware.rw_handles [None req-cff8885b-00c4-464d-91a5-a42d5693a8f4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1394.964539] env[63379]: value = "vm-369253" [ 1394.964539] env[63379]: _type = "VirtualMachine" [ 1394.964539] env[63379]: }. {{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1394.966077] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-f9e50319-fda0-4f2d-ba2e-fefaf82b2d87 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.977927] env[63379]: DEBUG oslo_vmware.rw_handles [None req-cff8885b-00c4-464d-91a5-a42d5693a8f4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lease: (returnval){ [ 1394.977927] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52fdd674-ba40-4a97-9f45-4a785f5a268a" [ 1394.977927] env[63379]: _type = "HttpNfcLease" [ 1394.977927] env[63379]: } obtained for exporting VM: (result){ [ 1394.977927] env[63379]: value = "vm-369253" [ 1394.977927] env[63379]: _type = "VirtualMachine" [ 1394.977927] env[63379]: }. {{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1394.980699] env[63379]: DEBUG oslo_vmware.api [None req-cff8885b-00c4-464d-91a5-a42d5693a8f4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the lease: (returnval){ [ 1394.980699] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52fdd674-ba40-4a97-9f45-4a785f5a268a" [ 1394.980699] env[63379]: _type = "HttpNfcLease" [ 1394.980699] env[63379]: } to be ready. 
{{(pid=63379) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1394.980699] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ae2ec06e-55a7-4156-a4d0-1e4d70408934 tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Lock "d47be684-6cd8-45c6-8c6a-9a6db0390f97" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.069s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1394.997554] env[63379]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1394.997554] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52fdd674-ba40-4a97-9f45-4a785f5a268a" [ 1394.997554] env[63379]: _type = "HttpNfcLease" [ 1394.997554] env[63379]: } is ready. {{(pid=63379) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1394.997554] env[63379]: DEBUG oslo_vmware.rw_handles [None req-cff8885b-00c4-464d-91a5-a42d5693a8f4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1394.997554] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52fdd674-ba40-4a97-9f45-4a785f5a268a" [ 1394.997554] env[63379]: _type = "HttpNfcLease" [ 1394.997554] env[63379]: }. {{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1394.998470] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b454b804-9f7e-41a1-a32a-fcd451c6fc85 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.013863] env[63379]: DEBUG oslo_vmware.rw_handles [None req-cff8885b-00c4-464d-91a5-a42d5693a8f4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52475826-aa83-3f8d-9bba-4be5eade29bf/disk-0.vmdk from lease info. {{(pid=63379) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1395.018216] env[63379]: DEBUG oslo_vmware.rw_handles [None req-cff8885b-00c4-464d-91a5-a42d5693a8f4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52475826-aa83-3f8d-9bba-4be5eade29bf/disk-0.vmdk for reading. 
{{(pid=63379) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1395.018216] env[63379]: DEBUG nova.policy [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd383a92a7b4c42dd91075f90490851a2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bf8ce6145b424cd88396c7b1f31e4498', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1395.088623] env[63379]: DEBUG nova.compute.manager [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1395.110653] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778980, 'name': CreateVM_Task, 'duration_secs': 0.525598} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.110653] env[63379]: DEBUG oslo_vmware.api [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5213453f-e427-707b-ddb1-698db2e35d94, 'name': SearchDatastore_Task, 'duration_secs': 0.048005} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.110829] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1395.110992] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1395.111386] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 30908171-e1b9-4e20-830e-419ff6d9a0fa/30908171-e1b9-4e20-830e-419ff6d9a0fa.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1395.113788] env[63379]: DEBUG oslo_concurrency.lockutils [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1395.113788] env[63379]: DEBUG oslo_concurrency.lockutils [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1395.113788] env[63379]: DEBUG oslo_concurrency.lockutils [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1395.113788] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1155568c-20b2-4448-a5f9-6a7d8777e3a6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.116024] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0080984-bc50-4423-99bc-7d58005caee1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.122720] env[63379]: DEBUG oslo_vmware.api [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Waiting for the task: (returnval){ [ 1395.122720] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52dbdaef-2cad-7d80-2eb3-bce91d59f8ef" [ 1395.122720] env[63379]: _type = "Task" [ 1395.122720] 
env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.124866] env[63379]: DEBUG oslo_vmware.api [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Waiting for the task: (returnval){ [ 1395.124866] env[63379]: value = "task-1778985" [ 1395.124866] env[63379]: _type = "Task" [ 1395.124866] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.133749] env[63379]: DEBUG nova.network.neutron [req-c01b6428-b92a-40c4-a2d7-a47c54202c23 req-7c24a7d7-56f1-4099-8bc5-e6b530dba549 service nova] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Updated VIF entry in instance network info cache for port 1913f18a-c402-444f-bfec-50a3ab88167d. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1395.134228] env[63379]: DEBUG nova.network.neutron [req-c01b6428-b92a-40c4-a2d7-a47c54202c23 req-7c24a7d7-56f1-4099-8bc5-e6b530dba549 service nova] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Updating instance_info_cache with network_info: [{"id": "1913f18a-c402-444f-bfec-50a3ab88167d", "address": "fa:16:3e:17:52:16", "network": {"id": "b8a1048f-18ff-4dd7-a19e-5d58874f3f5d", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1410192054-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea2c1f9216ee4d8e8349a27de543c2d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaba65c3-6925-4c7f-83b6-17cd1a328e27", "external-id": "nsx-vlan-transportzone-202", "segmentation_id": 202, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1913f18a-c4", "ovs_interfaceid": "1913f18a-c402-444f-bfec-50a3ab88167d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1395.141622] env[63379]: DEBUG oslo_vmware.api [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Task: {'id': task-1778985, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.141893] env[63379]: DEBUG oslo_vmware.api [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52dbdaef-2cad-7d80-2eb3-bce91d59f8ef, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.146990] env[63379]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-955cc70c-7c85-4144-9fb2-114f664d536c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.358857] env[63379]: DEBUG nova.compute.manager [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1395.411925] env[63379]: DEBUG oslo_vmware.api [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Task: {'id': task-1778983, 'name': ReconfigVM_Task, 'duration_secs': 0.313743} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.412215] env[63379]: DEBUG oslo_vmware.api [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Task: {'id': task-1778982, 'name': PowerOnVM_Task, 'duration_secs': 0.750974} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.413211] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Reconfigured VM instance instance-00000006 to attach disk [datastore1] a6f7c217-a493-403d-b776-870df4575f2a/a6f7c217-a493-403d-b776-870df4575f2a.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1395.417321] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1395.417321] env[63379]: INFO nova.compute.manager [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Took 10.62 seconds to spawn the instance on the hypervisor. 
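Several request threads above repeat the same image-cache dance: take a lock on the cached VMDK under devstack-image-cache_base, create the cache directory if it is missing, then copy the cached disk into the instance's own directory before extending it. A simplified sketch of that pattern, with local paths and threading locks standing in for datastore paths and the external locks the real driver uses; everything here is illustrative:

    # Simplified cache-then-copy pattern visible in the log: serialize on the
    # cached image, materialize it once, then copy it per instance.
    import os, shutil, threading

    _image_locks = {}
    _guard = threading.Lock()

    def _lock_for(key):
        with _guard:
            return _image_locks.setdefault(key, threading.Lock())

    def fetch_image_if_missing(cache_dir, image_id, fetch_fn):
        """Return the cached VMDK path, fetching it only on first use."""
        cached = os.path.join(cache_dir, image_id, f"{image_id}.vmdk")
        with _lock_for(image_id):
            if not os.path.exists(cached):
                os.makedirs(os.path.dirname(cached), exist_ok=True)
                fetch_fn(cached)          # e.g. stream the image from Glance
        return cached

    def copy_to_instance(cached, instance_dir, instance_uuid):
        os.makedirs(instance_dir, exist_ok=True)
        dst = os.path.join(instance_dir, f"{instance_uuid}.vmdk")
        shutil.copyfile(cached, dst)      # rough analogue of CopyVirtualDisk_Task
        return dst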
[ 1395.417460] env[63379]: DEBUG nova.compute.manager [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1395.423919] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9f0af5ab-d6b6-48a3-8152-d7c27f137d3f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.426986] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cb33a5d-5ba3-4be0-9408-80fad4557e2b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.447031] env[63379]: DEBUG oslo_vmware.api [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Waiting for the task: (returnval){ [ 1395.447031] env[63379]: value = "task-1778986" [ 1395.447031] env[63379]: _type = "Task" [ 1395.447031] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.469729] env[63379]: DEBUG oslo_vmware.api [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Task: {'id': task-1778986, 'name': Rename_Task} progress is 10%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.489565] env[63379]: DEBUG nova.compute.manager [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1395.620620] env[63379]: DEBUG oslo_concurrency.lockutils [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1395.636292] env[63379]: DEBUG nova.network.neutron [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1395.641626] env[63379]: DEBUG oslo_vmware.api [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52dbdaef-2cad-7d80-2eb3-bce91d59f8ef, 'name': SearchDatastore_Task, 'duration_secs': 0.016312} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.644989] env[63379]: DEBUG oslo_concurrency.lockutils [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1395.645263] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1395.645502] env[63379]: DEBUG oslo_concurrency.lockutils [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1395.645656] env[63379]: DEBUG oslo_concurrency.lockutils [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1395.645840] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1395.646328] env[63379]: DEBUG oslo_concurrency.lockutils [req-c01b6428-b92a-40c4-a2d7-a47c54202c23 req-7c24a7d7-56f1-4099-8bc5-e6b530dba549 service nova] Releasing lock "refresh_cache-bf0dd3cf-684c-4378-a89c-5b9f16df062d" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1395.646674] env[63379]: DEBUG oslo_vmware.api [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Task: {'id': task-1778985, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.649575] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c0ccd778-765b-46f2-8fc6-26dffe27877a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.665429] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1395.665704] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1395.669320] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb93f558-7211-4975-aa0d-0f635afb8166 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.681251] env[63379]: DEBUG oslo_vmware.api [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Waiting for the task: (returnval){ [ 1395.681251] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b04bfd-e415-1fdb-dfe8-5abac9191433" [ 1395.681251] env[63379]: _type = "Task" [ 1395.681251] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.689460] env[63379]: DEBUG oslo_vmware.api [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b04bfd-e415-1fdb-dfe8-5abac9191433, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.813315] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a953e061-baed-4983-842c-3bbfd5dfd6e5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.821856] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93cba5ab-17aa-41f4-ab22-1e097a785f86 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.860272] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eda52c4-a4d3-410d-a440-b26c9abce22e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.875504] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ac2cbac-a147-449c-ba32-1f389826dae4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.911194] env[63379]: DEBUG nova.compute.provider_tree [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1395.966308] env[63379]: INFO nova.compute.manager [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Took 15.74 seconds to build instance. [ 1395.977109] env[63379]: DEBUG oslo_vmware.api [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Task: {'id': task-1778986, 'name': Rename_Task, 'duration_secs': 0.356113} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.977571] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1395.980172] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0da4d9dc-414f-448a-a67b-32db288dbb9d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.994427] env[63379]: DEBUG oslo_vmware.api [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Waiting for the task: (returnval){ [ 1395.994427] env[63379]: value = "task-1778987" [ 1395.994427] env[63379]: _type = "Task" [ 1395.994427] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.015086] env[63379]: DEBUG oslo_vmware.api [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Task: {'id': task-1778987, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.022081] env[63379]: DEBUG oslo_concurrency.lockutils [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1396.145377] env[63379]: DEBUG oslo_vmware.api [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Task: {'id': task-1778985, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.568651} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.145377] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 30908171-e1b9-4e20-830e-419ff6d9a0fa/30908171-e1b9-4e20-830e-419ff6d9a0fa.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1396.145377] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1396.145377] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-77129e69-7399-4bd9-94ab-b23b4d9d2402 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.153571] env[63379]: DEBUG oslo_vmware.api [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Waiting for the task: (returnval){ [ 1396.153571] env[63379]: value = "task-1778988" [ 1396.153571] env[63379]: _type = "Task" [ 1396.153571] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.167050] env[63379]: DEBUG oslo_vmware.api [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Task: {'id': task-1778988, 'name': ExtendVirtualDisk_Task} progress is 0%. 
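[editor's note] Every vCenter operation in these entries (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, Rename_Task, PowerOnVM_Task) returns a task object that the driver polls until completion, logging progress along the way; that is what the repeated "progress is N%" / "completed successfully" lines are. A hedged mock-up of such a poll loop with stand-in objects (oslo.vmware handles this internally through its API session; the classes below are not its real interface):

    # Mock-up of the "wait for task" polling pattern visible in the log output.
    import time
    from dataclasses import dataclass

    @dataclass
    class FakeTask:
        name: str
        progress: int = 0

        def poll(self) -> int:
            # Pretend the backend advances the task a bit on every poll.
            self.progress = min(100, self.progress + 33)
            return self.progress

    def wait_for_task(task: FakeTask, interval: float = 0.5) -> None:
        # Poll until the task reports 100%, logging progress the way the log
        # above does ("progress is 0%", "progress is 66%", "completed successfully").
        while True:
            progress = task.poll()
            print(f"Task {task.name} progress is {progress}%")
            if progress >= 100:
                print(f"Task {task.name} completed successfully")
                return
            time.sleep(interval)

    wait_for_task(FakeTask("PowerOnVM_Task"))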
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.193585] env[63379]: DEBUG oslo_vmware.api [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b04bfd-e415-1fdb-dfe8-5abac9191433, 'name': SearchDatastore_Task, 'duration_secs': 0.015875} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.193851] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22d80597-250d-4c27-af09-10a45492e833 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.201418] env[63379]: DEBUG oslo_vmware.api [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Waiting for the task: (returnval){ [ 1396.201418] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]528eda72-73eb-415e-5630-c5688dc1e3fe" [ 1396.201418] env[63379]: _type = "Task" [ 1396.201418] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.212681] env[63379]: DEBUG oslo_vmware.api [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]528eda72-73eb-415e-5630-c5688dc1e3fe, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.229710] env[63379]: DEBUG nova.network.neutron [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Updating instance_info_cache with network_info: [{"id": "65e3bc3b-bfed-4dd6-be59-87481a211014", "address": "fa:16:3e:cf:dd:6f", "network": {"id": "66f99999-2093-485c-98b2-12d4a173be2a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1040360959-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e8ea1d9b2b194236ac9e91082b291b97", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1d25020-c621-4388-ac1d-de55bfefbe50", "external-id": "nsx-vlan-transportzone-573", "segmentation_id": 573, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap65e3bc3b-bf", "ovs_interfaceid": "65e3bc3b-bfed-4dd6-be59-87481a211014", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1396.392162] env[63379]: DEBUG nova.compute.manager [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1396.414841] env[63379]: DEBUG nova.scheduler.client.report [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1396.426801] env[63379]: DEBUG nova.virt.hardware [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1396.427506] env[63379]: DEBUG nova.virt.hardware [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1396.427777] env[63379]: DEBUG nova.virt.hardware [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1396.429326] env[63379]: DEBUG nova.virt.hardware [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1396.429326] env[63379]: DEBUG nova.virt.hardware [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1396.429326] env[63379]: DEBUG nova.virt.hardware [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1396.429326] env[63379]: DEBUG nova.virt.hardware 
[None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1396.429326] env[63379]: DEBUG nova.virt.hardware [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1396.429948] env[63379]: DEBUG nova.virt.hardware [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1396.429948] env[63379]: DEBUG nova.virt.hardware [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1396.430103] env[63379]: DEBUG nova.virt.hardware [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1396.431712] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85fb6680-b4a5-4486-a7fd-3c740d48f39c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.444128] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3de8b77d-b63d-42ae-a901-d07279a7fec0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.469469] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fad848c6-71f9-42b0-b2a0-71180c56b97b tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Lock "5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.166s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1396.507643] env[63379]: DEBUG oslo_vmware.api [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Task: {'id': task-1778987, 'name': PowerOnVM_Task} progress is 66%. 
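[editor's note] The nova.virt.hardware entries above trace the CPU topology search for the 1-vCPU m1.nano flavor: flavor and image limits are unset (0:0:0), the maxima default to 65536 per dimension, and the only possible topology for one vCPU is 1 socket x 1 core x 1 thread. A toy re-enumeration of that arithmetic under those assumptions (not Nova's actual algorithm; maxima reduced from 65536 so the brute force stays trivial):

    # Enumerate (sockets, cores, threads) triples whose product equals the
    # vCPU count and which respect per-dimension maxima -- the same arithmetic
    # the hardware.py lines above describe for a 1-vCPU flavor.
    from itertools import product

    def possible_topologies(vcpus: int, max_sockets: int, max_cores: int, max_threads: int):
        found = []
        for sockets, cores, threads in product(range(1, max_sockets + 1),
                                               range(1, max_cores + 1),
                                               range(1, max_threads + 1)):
            if sockets * cores * threads == vcpus:
                found.append((sockets, cores, threads))
        return found

    # With 1 vCPU the only solution is 1x1x1, matching "Possible topologies
    # [VirtCPUTopology(cores=1,sockets=1,threads=1)]" in the log.
    print(possible_topologies(1, max_sockets=8, max_cores=8, max_threads=2))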
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.573404] env[63379]: DEBUG nova.compute.manager [None req-f8eda5f4-e7e4-40f2-b357-a740fd15935f tempest-ServerDiagnosticsV248Test-551458049 tempest-ServerDiagnosticsV248Test-551458049-project-admin] [instance: ae565930-1bbc-4e75-bfc1-25dbcfd2e999] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1396.575204] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b05f6dac-824a-49f7-b4a8-256df3051d6e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.584477] env[63379]: INFO nova.compute.manager [None req-f8eda5f4-e7e4-40f2-b357-a740fd15935f tempest-ServerDiagnosticsV248Test-551458049 tempest-ServerDiagnosticsV248Test-551458049-project-admin] [instance: ae565930-1bbc-4e75-bfc1-25dbcfd2e999] Retrieving diagnostics [ 1396.585711] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c34e4b5e-08fa-407f-825c-d796641c976c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.623071] env[63379]: DEBUG nova.network.neutron [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Successfully created port: 901f3c6f-920f-4eed-b3b6-2ba116322aae {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1396.666264] env[63379]: DEBUG oslo_vmware.api [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Task: {'id': task-1778988, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.111357} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.666522] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1396.667440] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5dd3c4e-11ab-4da3-835f-b9116348fa15 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.697507] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Reconfiguring VM instance instance-00000008 to attach disk [datastore1] 30908171-e1b9-4e20-830e-419ff6d9a0fa/30908171-e1b9-4e20-830e-419ff6d9a0fa.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1396.697965] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-53cc1b73-b38f-4c3c-b75a-cd2f872fdff1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.730201] env[63379]: DEBUG oslo_vmware.api [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]528eda72-73eb-415e-5630-c5688dc1e3fe, 'name': SearchDatastore_Task, 'duration_secs': 0.013791} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.732325] env[63379]: DEBUG oslo_concurrency.lockutils [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1396.732633] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] bf0dd3cf-684c-4378-a89c-5b9f16df062d/bf0dd3cf-684c-4378-a89c-5b9f16df062d.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1396.732979] env[63379]: DEBUG oslo_vmware.api [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Waiting for the task: (returnval){ [ 1396.732979] env[63379]: value = "task-1778989" [ 1396.732979] env[63379]: _type = "Task" [ 1396.732979] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.733775] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Releasing lock "refresh_cache-25090d85-cd10-44fc-aa9d-071ada14f249" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1396.734562] env[63379]: DEBUG nova.compute.manager [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Instance network_info: |[{"id": "65e3bc3b-bfed-4dd6-be59-87481a211014", "address": "fa:16:3e:cf:dd:6f", "network": {"id": "66f99999-2093-485c-98b2-12d4a173be2a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1040360959-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e8ea1d9b2b194236ac9e91082b291b97", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1d25020-c621-4388-ac1d-de55bfefbe50", "external-id": "nsx-vlan-transportzone-573", "segmentation_id": 573, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap65e3bc3b-bf", "ovs_interfaceid": "65e3bc3b-bfed-4dd6-be59-87481a211014", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1396.734692] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f9c22c79-8f99-4ae5-8315-42a47b7bc1bd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.737614] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cf:dd:6f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e1d25020-c621-4388-ac1d-de55bfefbe50', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '65e3bc3b-bfed-4dd6-be59-87481a211014', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1396.748099] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Creating folder: Project (e8ea1d9b2b194236ac9e91082b291b97). Parent ref: group-v369214. 
{{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1396.752657] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c2962b3c-ea20-459c-b55e-4409e141a0dd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.764889] env[63379]: DEBUG oslo_vmware.api [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Task: {'id': task-1778989, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.767088] env[63379]: DEBUG oslo_vmware.api [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Waiting for the task: (returnval){ [ 1396.767088] env[63379]: value = "task-1778990" [ 1396.767088] env[63379]: _type = "Task" [ 1396.767088] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.769357] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Created folder: Project (e8ea1d9b2b194236ac9e91082b291b97) in parent group-v369214. [ 1396.769902] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Creating folder: Instances. Parent ref: group-v369257. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1396.774492] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bc86238b-f863-445b-b468-e2b5fcabd0bb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.785395] env[63379]: DEBUG oslo_vmware.api [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': task-1778990, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.789248] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Created folder: Instances in parent group-v369257. [ 1396.789787] env[63379]: DEBUG oslo.service.loopingcall [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
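[editor's note] The vm_util entries above build the per-tenant folder hierarchy before the VM is created: a "Project (<tenant-id>)" folder under the parent group, then an "Instances" folder inside it, and only then CreateVM_Task. A small sketch of an idempotent create-folder-if-missing helper following that layout (parent ref and tenant id copied from the log; the inventory here is a plain dict, not the vSphere inventory):

    # Toy model of the "create folder if missing" pattern from the log:
    # Project (<tenant-id>) -> Instances, created idempotently before CreateVM.
    inventory: dict[str, list[str]] = {"group-v369214": []}   # parent ref from the log

    def create_folder_if_missing(parent: str, name: str) -> str:
        ref = f"{parent}/{name}"
        if name not in inventory[parent]:
            inventory[parent].append(name)
            inventory.setdefault(ref, [])
            print(f"Created folder: {name} in parent {parent}.")
        return ref

    tenant = "e8ea1d9b2b194236ac9e91082b291b97"               # tenant id from the log
    project_ref = create_folder_if_missing("group-v369214", f"Project ({tenant})")
    instances_ref = create_folder_if_missing(project_ref, "Instances")
    # ...CreateVM_Task would then run with the Instances folder as the VM's home.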
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1396.789985] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1396.790183] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b00bc2d7-8219-4327-aeca-aa3bf2c382ff {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.822237] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1396.822237] env[63379]: value = "task-1778993" [ 1396.822237] env[63379]: _type = "Task" [ 1396.822237] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.835736] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778993, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.922651] env[63379]: DEBUG oslo_concurrency.lockutils [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.575s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1396.923262] env[63379]: DEBUG nova.compute.manager [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1396.927294] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.799s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1396.930673] env[63379]: INFO nova.compute.claims [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1396.972966] env[63379]: DEBUG nova.compute.manager [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1397.009123] env[63379]: DEBUG oslo_vmware.api [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Task: {'id': task-1778987, 'name': PowerOnVM_Task, 'duration_secs': 0.705331} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.009827] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1397.009827] env[63379]: DEBUG nova.compute.manager [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1397.010871] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b47ba782-96d0-4204-ae7f-72a7f51883c6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.027887] env[63379]: DEBUG nova.compute.manager [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Stashing vm_state: active {{(pid=63379) _prep_resize /opt/stack/nova/nova/compute/manager.py:5671}} [ 1397.255022] env[63379]: DEBUG oslo_vmware.api [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Task: {'id': task-1778989, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.282099] env[63379]: DEBUG oslo_vmware.api [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': task-1778990, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.341455] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1778993, 'name': CreateVM_Task, 'duration_secs': 0.494091} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.341455] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1397.341983] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1397.342074] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1397.342716] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1397.342799] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-452b48a6-b051-4a61-bbeb-c4f7631dd038 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.349961] env[63379]: DEBUG oslo_vmware.api [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Waiting for the task: (returnval){ [ 1397.349961] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]523779fe-1dad-4e29-e4ab-3b829d1e578b" [ 1397.349961] env[63379]: _type = "Task" [ 1397.349961] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.360760] env[63379]: DEBUG oslo_vmware.api [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]523779fe-1dad-4e29-e4ab-3b829d1e578b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.434202] env[63379]: DEBUG nova.compute.utils [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1397.440741] env[63379]: DEBUG nova.compute.manager [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1397.440741] env[63379]: DEBUG nova.network.neutron [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1397.506196] env[63379]: DEBUG oslo_concurrency.lockutils [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1397.542901] env[63379]: DEBUG nova.compute.manager [req-dd8235d9-654f-46c1-9754-eeffa4901e6b req-e088145e-d8c6-4116-95ab-333dc0ec80c5 service nova] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Received event network-vif-plugged-65e3bc3b-bfed-4dd6-be59-87481a211014 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1397.543164] env[63379]: DEBUG oslo_concurrency.lockutils [req-dd8235d9-654f-46c1-9754-eeffa4901e6b req-e088145e-d8c6-4116-95ab-333dc0ec80c5 service nova] Acquiring lock "25090d85-cd10-44fc-aa9d-071ada14f249-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1397.543379] env[63379]: DEBUG oslo_concurrency.lockutils [req-dd8235d9-654f-46c1-9754-eeffa4901e6b req-e088145e-d8c6-4116-95ab-333dc0ec80c5 service nova] Lock "25090d85-cd10-44fc-aa9d-071ada14f249-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1397.543546] env[63379]: DEBUG oslo_concurrency.lockutils [req-dd8235d9-654f-46c1-9754-eeffa4901e6b req-e088145e-d8c6-4116-95ab-333dc0ec80c5 service nova] Lock "25090d85-cd10-44fc-aa9d-071ada14f249-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1397.543713] env[63379]: DEBUG nova.compute.manager [req-dd8235d9-654f-46c1-9754-eeffa4901e6b req-e088145e-d8c6-4116-95ab-333dc0ec80c5 service nova] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] No waiting events found dispatching network-vif-plugged-65e3bc3b-bfed-4dd6-be59-87481a211014 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1397.543874] env[63379]: WARNING nova.compute.manager [req-dd8235d9-654f-46c1-9754-eeffa4901e6b req-e088145e-d8c6-4116-95ab-333dc0ec80c5 service nova] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Received unexpected event network-vif-plugged-65e3bc3b-bfed-4dd6-be59-87481a211014 for instance with vm_state building and task_state spawning. 
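[editor's note] The req-dd8235d9 entries explain the WARNING above: Neutron reported network-vif-plugged for port 65e3bc3b while instance 25090d85 was still building, before the compute manager had registered a waiter for that event, so the event is dispatched, finds nobody waiting, and is logged as unexpected. A minimal standard-library sketch of that register-then-wait pattern and of the race that produces the warning (event names mirror the log; the class is illustrative, not Nova's InstanceEvents):

    # Minimal mock of the "wait for network-vif-plugged" pattern and the race
    # that yields "Received unexpected event ..." when the event beats the waiter.
    import threading

    class InstanceEvents:
        def __init__(self):
            self._waiters: dict[str, threading.Event] = {}
            self._lock = threading.Lock()

        def prepare(self, name: str) -> threading.Event:
            # Register interest *before* triggering the operation that emits the event.
            with self._lock:
                return self._waiters.setdefault(name, threading.Event())

        def pop(self, name: str) -> bool:
            # Dispatch an incoming event; report whether anyone was waiting for it.
            with self._lock:
                waiter = self._waiters.pop(name, None)
            if waiter is None:
                print(f"WARNING Received unexpected event {name}")
                return False
            waiter.set()
            return True

    events = InstanceEvents()
    # Event arrives before prepare() was called -> the warning seen in the log.
    events.pop("network-vif-plugged-65e3bc3b-bfed-4dd6-be59-87481a211014")
    # Happy path: register first, then the event is consumed without a warning.
    waiter = events.prepare("network-vif-plugged-<some-port-id>")
    events.pop("network-vif-plugged-<some-port-id>")
    assert waiter.is_set()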
[ 1397.545109] env[63379]: DEBUG nova.compute.manager [req-dd8235d9-654f-46c1-9754-eeffa4901e6b req-e088145e-d8c6-4116-95ab-333dc0ec80c5 service nova] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Received event network-changed-65e3bc3b-bfed-4dd6-be59-87481a211014 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1397.545340] env[63379]: DEBUG nova.compute.manager [req-dd8235d9-654f-46c1-9754-eeffa4901e6b req-e088145e-d8c6-4116-95ab-333dc0ec80c5 service nova] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Refreshing instance network info cache due to event network-changed-65e3bc3b-bfed-4dd6-be59-87481a211014. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1397.545546] env[63379]: DEBUG oslo_concurrency.lockutils [req-dd8235d9-654f-46c1-9754-eeffa4901e6b req-e088145e-d8c6-4116-95ab-333dc0ec80c5 service nova] Acquiring lock "refresh_cache-25090d85-cd10-44fc-aa9d-071ada14f249" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1397.545894] env[63379]: DEBUG oslo_concurrency.lockutils [req-dd8235d9-654f-46c1-9754-eeffa4901e6b req-e088145e-d8c6-4116-95ab-333dc0ec80c5 service nova] Acquired lock "refresh_cache-25090d85-cd10-44fc-aa9d-071ada14f249" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1397.545946] env[63379]: DEBUG nova.network.neutron [req-dd8235d9-654f-46c1-9754-eeffa4901e6b req-e088145e-d8c6-4116-95ab-333dc0ec80c5 service nova] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Refreshing network info cache for port 65e3bc3b-bfed-4dd6-be59-87481a211014 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1397.551279] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1397.565832] env[63379]: DEBUG oslo_concurrency.lockutils [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1397.651573] env[63379]: DEBUG nova.policy [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8f210ac178524942a52c00e061345445', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e0e95e0ca1804616a5d258396749d295', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1397.711053] env[63379]: DEBUG nova.compute.manager [req-a33c3d2f-40a7-4503-8371-b08647db11ae req-1756b69f-8d12-42e3-aa45-628eeb940dc5 service nova] [instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] Received event 
network-vif-deleted-01b61007-ef4f-4f75-871c-33a30b49ecf4 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1397.753436] env[63379]: DEBUG oslo_vmware.api [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Task: {'id': task-1778989, 'name': ReconfigVM_Task, 'duration_secs': 0.849583} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.753672] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Reconfigured VM instance instance-00000008 to attach disk [datastore1] 30908171-e1b9-4e20-830e-419ff6d9a0fa/30908171-e1b9-4e20-830e-419ff6d9a0fa.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1397.754481] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7efc712d-7c30-4e3a-9887-deae9ea583e7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.765340] env[63379]: DEBUG oslo_vmware.api [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Waiting for the task: (returnval){ [ 1397.765340] env[63379]: value = "task-1778994" [ 1397.765340] env[63379]: _type = "Task" [ 1397.765340] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.784446] env[63379]: DEBUG oslo_vmware.api [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Task: {'id': task-1778994, 'name': Rename_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.788754] env[63379]: DEBUG oslo_vmware.api [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': task-1778990, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.738655} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.788754] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] bf0dd3cf-684c-4378-a89c-5b9f16df062d/bf0dd3cf-684c-4378-a89c-5b9f16df062d.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1397.788754] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1397.789049] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ee98b22e-2380-4046-8df9-fae7210dea87 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.802614] env[63379]: DEBUG oslo_vmware.api [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Waiting for the task: (returnval){ [ 1397.802614] env[63379]: value = "task-1778995" [ 1397.802614] env[63379]: _type = "Task" [ 1397.802614] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.816167] env[63379]: DEBUG oslo_vmware.api [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': task-1778995, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.869286] env[63379]: DEBUG oslo_vmware.api [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]523779fe-1dad-4e29-e4ab-3b829d1e578b, 'name': SearchDatastore_Task, 'duration_secs': 0.059529} completed successfully. 
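[editor's note] Across these entries the same per-instance disk pipeline repeats for each spawning VM: copy the cached image vmdk into the instance directory, extend the root disk to 1048576 (which matches the flavor's 1 GiB root disk if the unit is KB), reconfigure the VM to attach the disk, rename it, and power it on. A compact stand-in for that sequence, with task names taken from the log and no-op steps in place of the real vCenter calls:

    # Stand-in for the per-instance disk pipeline the log walks through:
    # copy cached image -> extend root disk -> attach -> rename -> power on.
    from typing import Callable

    def run_pipeline(steps: list[tuple[str, Callable[[], None]]]) -> None:
        for name, step in steps:
            print(f"Task: {name} started")
            step()                                            # each step is a vCenter task
            print(f"Task: {name} completed successfully")     # that gets polled to completion

    instance = "bf0dd3cf-684c-4378-a89c-5b9f16df062d"         # instance id from the log
    flavor_root_kb = 1 * 1024 * 1024                          # 1 GiB, logged as 1048576

    run_pipeline([
        ("CopyVirtualDisk_Task",   lambda: None),  # cache vmdk -> [datastore1] <instance>/<instance>.vmdk
        ("ExtendVirtualDisk_Task", lambda: None),  # grow root disk to flavor_root_kb
        ("ReconfigVM_Task",        lambda: None),  # attach the disk to the VM
        ("Rename_Task",            lambda: None),  # give the VM its final name
        ("PowerOnVM_Task",         lambda: None),  # boot the instance
    ])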
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.869767] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1397.870475] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1397.870475] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1397.870805] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1397.870863] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1397.871884] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c7ccf6b7-eeb3-4396-b8f5-e441c0538974 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.884100] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1397.884376] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1397.885560] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1e44888-8353-47f6-9986-c375b7529cfe {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.896904] env[63379]: DEBUG oslo_vmware.api [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Waiting for the task: (returnval){ [ 1397.896904] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]522463de-dd3f-2e20-90b9-bae11b46b848" [ 1397.896904] env[63379]: _type = "Task" [ 1397.896904] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.910124] env[63379]: DEBUG oslo_vmware.api [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]522463de-dd3f-2e20-90b9-bae11b46b848, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.939925] env[63379]: DEBUG nova.compute.manager [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1398.286317] env[63379]: DEBUG oslo_vmware.api [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Task: {'id': task-1778994, 'name': Rename_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.322669] env[63379]: DEBUG oslo_vmware.api [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': task-1778995, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.105545} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.322669] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1398.322669] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83d4c01c-7efb-4ab9-b386-1d0d80d85312 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.364398] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Reconfiguring VM instance instance-0000000d to attach disk [datastore1] bf0dd3cf-684c-4378-a89c-5b9f16df062d/bf0dd3cf-684c-4378-a89c-5b9f16df062d.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1398.367896] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-af940490-1c10-4d7b-8166-887b523f00ab {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.391527] env[63379]: DEBUG oslo_vmware.api [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Waiting for the task: (returnval){ [ 1398.391527] env[63379]: value = "task-1778996" [ 1398.391527] env[63379]: _type = "Task" [ 1398.391527] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.405171] env[63379]: DEBUG oslo_vmware.api [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': task-1778996, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.417151] env[63379]: DEBUG oslo_vmware.api [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]522463de-dd3f-2e20-90b9-bae11b46b848, 'name': SearchDatastore_Task, 'duration_secs': 0.037318} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.418231] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bcc949f0-a437-4b28-a90e-642accb33ccb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.430704] env[63379]: DEBUG oslo_vmware.api [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Waiting for the task: (returnval){ [ 1398.430704] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b0e07b-381e-8867-0bf2-7160f3a99bc4" [ 1398.430704] env[63379]: _type = "Task" [ 1398.430704] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.443950] env[63379]: DEBUG oslo_vmware.api [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b0e07b-381e-8867-0bf2-7160f3a99bc4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.448076] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78cf055f-fa12-4507-b1b5-d7af7c783a30 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.452712] env[63379]: INFO nova.virt.block_device [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Booting with volume 263e34b9-3753-4240-8bd6-67c4019d79ae at /dev/sda [ 1398.458071] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a82cad6-78d8-4373-9b4c-1b9fe85f9ddd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.518434] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7afd91fe-e395-49a3-81e1-257391a5a67d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.530308] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab830114-9d6d-478a-9fe4-8a667d47c5d4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.544257] env[63379]: DEBUG nova.compute.provider_tree [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1398.550020] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-94f6b81f-3aa9-43a3-8f8f-78f2edf6b8a1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.560285] env[63379]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ef66cc6-ea9e-4153-be6a-4810565b740d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.603176] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fab06c75-6c43-4f6b-a084-63e91f4837fc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.615823] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2220d51a-72d8-4920-996e-8cc35de99d94 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.657575] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b504449-324e-4727-b716-08750fc7988a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.666753] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9a95b45-12cc-4334-bbf5-92ab3448d0fe {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.686372] env[63379]: DEBUG nova.virt.block_device [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Updating existing volume attachment record: 98e0b208-b121-4c58-a878-a32d4486a20b {{(pid=63379) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1398.783456] env[63379]: DEBUG oslo_vmware.api [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Task: {'id': task-1778994, 'name': Rename_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.903245] env[63379]: DEBUG oslo_vmware.api [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': task-1778996, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.943020] env[63379]: DEBUG oslo_vmware.api [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b0e07b-381e-8867-0bf2-7160f3a99bc4, 'name': SearchDatastore_Task, 'duration_secs': 0.024791} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.943332] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1398.943625] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 25090d85-cd10-44fc-aa9d-071ada14f249/25090d85-cd10-44fc-aa9d-071ada14f249.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1398.943900] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a7d753a7-382a-45c0-b2ac-0e8012e912b0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.954705] env[63379]: DEBUG oslo_vmware.api [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Waiting for the task: (returnval){ [ 1398.954705] env[63379]: value = "task-1778997" [ 1398.954705] env[63379]: _type = "Task" [ 1398.954705] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.961733] env[63379]: DEBUG nova.network.neutron [req-dd8235d9-654f-46c1-9754-eeffa4901e6b req-e088145e-d8c6-4116-95ab-333dc0ec80c5 service nova] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Updated VIF entry in instance network info cache for port 65e3bc3b-bfed-4dd6-be59-87481a211014. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1398.962173] env[63379]: DEBUG nova.network.neutron [req-dd8235d9-654f-46c1-9754-eeffa4901e6b req-e088145e-d8c6-4116-95ab-333dc0ec80c5 service nova] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Updating instance_info_cache with network_info: [{"id": "65e3bc3b-bfed-4dd6-be59-87481a211014", "address": "fa:16:3e:cf:dd:6f", "network": {"id": "66f99999-2093-485c-98b2-12d4a173be2a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1040360959-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e8ea1d9b2b194236ac9e91082b291b97", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1d25020-c621-4388-ac1d-de55bfefbe50", "external-id": "nsx-vlan-transportzone-573", "segmentation_id": 573, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap65e3bc3b-bf", "ovs_interfaceid": "65e3bc3b-bfed-4dd6-be59-87481a211014", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1398.967467] env[63379]: DEBUG oslo_vmware.api [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Task: {'id': task-1778997, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.053198] env[63379]: DEBUG nova.scheduler.client.report [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1399.171273] env[63379]: DEBUG nova.network.neutron [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Successfully created port: 6b70ec9a-65bb-4a1c-9312-97031fc4fc46 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1399.283045] env[63379]: DEBUG oslo_vmware.api [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Task: {'id': task-1778994, 'name': Rename_Task, 'duration_secs': 1.273884} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.283267] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1399.283567] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-300126c2-7b2a-4d52-b191-c62124f5178d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.291810] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Acquiring lock "941ac23c-6aa9-4ed1-840a-326423b7cbc0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1399.292053] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Lock "941ac23c-6aa9-4ed1-840a-326423b7cbc0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1399.299272] env[63379]: DEBUG oslo_vmware.api [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Waiting for the task: (returnval){ [ 1399.299272] env[63379]: value = "task-1778998" [ 1399.299272] env[63379]: _type = "Task" [ 1399.299272] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.312301] env[63379]: DEBUG oslo_vmware.api [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Task: {'id': task-1778998, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.403795] env[63379]: DEBUG oslo_vmware.api [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': task-1778996, 'name': ReconfigVM_Task, 'duration_secs': 0.674231} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.404474] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Reconfigured VM instance instance-0000000d to attach disk [datastore1] bf0dd3cf-684c-4378-a89c-5b9f16df062d/bf0dd3cf-684c-4378-a89c-5b9f16df062d.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1399.404816] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-51181e2f-60c9-4bc2-9292-84ee9d262dd5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.415948] env[63379]: DEBUG oslo_vmware.api [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Waiting for the task: (returnval){ [ 1399.415948] env[63379]: value = "task-1778999" [ 1399.415948] env[63379]: _type = "Task" [ 1399.415948] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.430692] env[63379]: DEBUG oslo_vmware.api [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': task-1778999, 'name': Rename_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.469910] env[63379]: DEBUG oslo_concurrency.lockutils [req-dd8235d9-654f-46c1-9754-eeffa4901e6b req-e088145e-d8c6-4116-95ab-333dc0ec80c5 service nova] Releasing lock "refresh_cache-25090d85-cd10-44fc-aa9d-071ada14f249" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1399.473788] env[63379]: DEBUG oslo_vmware.api [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Task: {'id': task-1778997, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.478852] env[63379]: DEBUG nova.network.neutron [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Successfully updated port: 901f3c6f-920f-4eed-b3b6-2ba116322aae {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1399.558690] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.631s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1399.559246] env[63379]: DEBUG nova.compute.manager [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1399.570023] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c7ff48fb-4913-4494-becc-b77f512d1ef8 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.536s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1399.570023] env[63379]: DEBUG nova.objects.instance [None req-c7ff48fb-4913-4494-becc-b77f512d1ef8 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Lazy-loading 'resources' on Instance uuid 724c7a22-1833-4dc5-ab38-a11498a83ab8 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1399.812587] env[63379]: DEBUG oslo_vmware.api [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Task: {'id': task-1778998, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.929622] env[63379]: DEBUG oslo_vmware.api [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': task-1778999, 'name': Rename_Task, 'duration_secs': 0.298731} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.930024] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1399.933163] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6dc2c0c9-d2f6-4cbf-957b-3db92321af30 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.939547] env[63379]: DEBUG oslo_vmware.api [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Waiting for the task: (returnval){ [ 1399.939547] env[63379]: value = "task-1779000" [ 1399.939547] env[63379]: _type = "Task" [ 1399.939547] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.958239] env[63379]: DEBUG oslo_vmware.api [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': task-1779000, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.971638] env[63379]: DEBUG oslo_vmware.api [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Task: {'id': task-1778997, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.671865} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.972068] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 25090d85-cd10-44fc-aa9d-071ada14f249/25090d85-cd10-44fc-aa9d-071ada14f249.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1399.972306] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1399.972573] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5fa5c2b0-53a0-4e8c-b7bc-ea132a85c17e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.982056] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Acquiring lock "refresh_cache-aaaf4b06-ef84-41ba-8054-29582854a9f1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1399.982280] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Acquired lock "refresh_cache-aaaf4b06-ef84-41ba-8054-29582854a9f1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1399.982492] env[63379]: DEBUG nova.network.neutron [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1399.989689] env[63379]: DEBUG oslo_vmware.api [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Waiting for the task: (returnval){ [ 1399.989689] env[63379]: value = "task-1779001" [ 1399.989689] env[63379]: _type = "Task" [ 1399.989689] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.002528] env[63379]: DEBUG oslo_vmware.api [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Task: {'id': task-1779001, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.072232] env[63379]: DEBUG nova.compute.utils [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1400.078632] env[63379]: DEBUG nova.compute.manager [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1400.078819] env[63379]: DEBUG nova.network.neutron [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1400.182361] env[63379]: DEBUG nova.policy [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'db6bd541e63b47e29e5c02fc02f162c8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dce3d2c2429642ee92f4bb7e53b0a128', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1400.312126] env[63379]: DEBUG oslo_vmware.api [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Task: {'id': task-1778998, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.450448] env[63379]: INFO nova.compute.manager [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Rebuilding instance [ 1400.465434] env[63379]: DEBUG oslo_vmware.api [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': task-1779000, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.510980] env[63379]: DEBUG oslo_vmware.api [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Task: {'id': task-1779001, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076035} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.511395] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1400.512377] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5ec3c6d-c818-470c-b96b-1ebe85c4dfe1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.536404] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Reconfiguring VM instance instance-0000000e to attach disk [datastore1] 25090d85-cd10-44fc-aa9d-071ada14f249/25090d85-cd10-44fc-aa9d-071ada14f249.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1400.539698] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-48b4532e-0d2a-47f0-8fb4-5f4b6e537d2c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.564174] env[63379]: DEBUG oslo_vmware.api [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Waiting for the task: (returnval){ [ 1400.564174] env[63379]: value = "task-1779002" [ 1400.564174] env[63379]: _type = "Task" [ 1400.564174] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.575929] env[63379]: DEBUG oslo_vmware.api [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Task: {'id': task-1779002, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.582227] env[63379]: DEBUG nova.compute.manager [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Start building block device mappings for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1400.599060] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb3df8b3-f028-4e2b-aa79-ff72af628b04 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.610322] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab7d04fb-ca10-4a5a-9f17-c10f6de709c3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.617789] env[63379]: DEBUG nova.network.neutron [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1400.659854] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bf09f31-ca5d-4cd2-a018-a27774c72e06 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.669736] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a918b551-5f0c-4ecb-b716-2ab4da67fa98 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.685750] env[63379]: DEBUG nova.compute.provider_tree [None req-c7ff48fb-4913-4494-becc-b77f512d1ef8 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1400.817261] env[63379]: DEBUG oslo_vmware.api [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Task: {'id': task-1778998, 'name': PowerOnVM_Task, 'duration_secs': 1.042254} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.817580] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1400.817775] env[63379]: INFO nova.compute.manager [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Took 24.08 seconds to spawn the instance on the hypervisor. 
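The records above alternate between "_poll_task ... progress is N%" lines and a closing entry that carries duration_secs once the vCenter task finishes. A minimal, purely illustrative sketch of that polling loop follows; it is not oslo.vmware's implementation, and poll_task, get_task_info and the dict keys are hypothetical placeholders.

```python
import time

def poll_task(get_task_info, task_id, interval=0.5, timeout=300.0):
    """Poll a task until it succeeds or fails, reporting progress along the way."""
    started = time.monotonic()
    while time.monotonic() - started < timeout:
        info = get_task_info(task_id)  # hypothetical: returns {'state': ..., 'progress': ..., 'error': ...}
        if info['state'] == 'success':
            duration = time.monotonic() - started
            print(f"Task {task_id} completed successfully in {duration:.6f}s")
            return info
        if info['state'] == 'error':
            raise RuntimeError(f"Task {task_id} failed: {info.get('error')}")
        print(f"Task {task_id} progress is {info.get('progress', 0)}%")
        time.sleep(interval)
    raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")
```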
[ 1400.817962] env[63379]: DEBUG nova.compute.manager [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1400.818789] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0ae7526-5043-4c65-bf0e-68962c224dd4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.855584] env[63379]: DEBUG nova.compute.manager [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1400.857142] env[63379]: DEBUG nova.virt.hardware [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1400.857142] env[63379]: DEBUG nova.virt.hardware [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1400.857142] env[63379]: DEBUG nova.virt.hardware [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1400.857142] env[63379]: DEBUG nova.virt.hardware [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1400.857142] env[63379]: DEBUG nova.virt.hardware [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1400.857142] env[63379]: DEBUG nova.virt.hardware [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1400.860757] env[63379]: DEBUG 
nova.virt.hardware [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1400.860757] env[63379]: DEBUG nova.virt.hardware [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1400.860841] env[63379]: DEBUG nova.virt.hardware [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1400.861074] env[63379]: DEBUG nova.virt.hardware [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1400.861286] env[63379]: DEBUG nova.virt.hardware [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1400.862190] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed882618-478a-42cf-a13b-aaf30e60c61d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.871686] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88d1e3df-9f7d-4de5-a23a-fcd18ba0b675 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.895702] env[63379]: DEBUG nova.compute.manager [req-e632a5e7-e1af-4120-8106-7e60ef406c1e req-0e169a6f-8805-404b-b75d-163abefb1505 service nova] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Received event network-changed-775d733a-ad5b-4b39-aa69-8b4a577c4348 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1400.895938] env[63379]: DEBUG nova.compute.manager [req-e632a5e7-e1af-4120-8106-7e60ef406c1e req-0e169a6f-8805-404b-b75d-163abefb1505 service nova] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Refreshing instance network info cache due to event network-changed-775d733a-ad5b-4b39-aa69-8b4a577c4348. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1400.896310] env[63379]: DEBUG oslo_concurrency.lockutils [req-e632a5e7-e1af-4120-8106-7e60ef406c1e req-0e169a6f-8805-404b-b75d-163abefb1505 service nova] Acquiring lock "refresh_cache-d47be684-6cd8-45c6-8c6a-9a6db0390f97" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1400.896509] env[63379]: DEBUG oslo_concurrency.lockutils [req-e632a5e7-e1af-4120-8106-7e60ef406c1e req-0e169a6f-8805-404b-b75d-163abefb1505 service nova] Acquired lock "refresh_cache-d47be684-6cd8-45c6-8c6a-9a6db0390f97" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1400.896700] env[63379]: DEBUG nova.network.neutron [req-e632a5e7-e1af-4120-8106-7e60ef406c1e req-0e169a6f-8805-404b-b75d-163abefb1505 service nova] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Refreshing network info cache for port 775d733a-ad5b-4b39-aa69-8b4a577c4348 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1400.939351] env[63379]: DEBUG nova.network.neutron [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Updating instance_info_cache with network_info: [{"id": "901f3c6f-920f-4eed-b3b6-2ba116322aae", "address": "fa:16:3e:36:6d:35", "network": {"id": "55f3848c-4d4f-4c83-a3e6-bc7a6f7af3ce", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.152", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eb95d75934bc4912a35f709406a98a65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap901f3c6f-92", "ovs_interfaceid": "901f3c6f-920f-4eed-b3b6-2ba116322aae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1400.960902] env[63379]: DEBUG oslo_vmware.api [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': task-1779000, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.011946] env[63379]: DEBUG nova.network.neutron [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Successfully created port: 5bb23315-a5dc-438f-bb8e-fc90360f23ec {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1401.078220] env[63379]: DEBUG oslo_vmware.api [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Task: {'id': task-1779002, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.189174] env[63379]: DEBUG nova.scheduler.client.report [None req-c7ff48fb-4913-4494-becc-b77f512d1ef8 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1401.348571] env[63379]: INFO nova.compute.manager [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Took 39.53 seconds to build instance. 
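Several lockutils records in this stretch report how long a lock was waited on and how long it was held (for example "waited 8.536s" above and "held 41.050s" for the per-instance build lock a little further down). The sketch below illustrates that wait/held accounting pattern using a plain threading.Lock as a stand-in for oslo_concurrency.lockutils; the function and variable names are illustrative, not Nova's.

```python
import threading
import time

# Stand-in for the per-instance build lock; in the log this is an
# oslo_concurrency lock keyed on the instance UUID.
_build_lock = threading.Lock()

def locked_do_build_and_run(instance_uuid):
    acquire_started = time.monotonic()
    with _build_lock:
        waited = time.monotonic() - acquire_started
        print(f'Lock "{instance_uuid}" acquired :: waited {waited:.3f}s')
        held_started = time.monotonic()
        # ... build and run the instance (elided) ...
        held = time.monotonic() - held_started
    print(f'Lock "{instance_uuid}" released :: held {held:.3f}s')
```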
[ 1401.444334] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Releasing lock "refresh_cache-aaaf4b06-ef84-41ba-8054-29582854a9f1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1401.444334] env[63379]: DEBUG nova.compute.manager [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Instance network_info: |[{"id": "901f3c6f-920f-4eed-b3b6-2ba116322aae", "address": "fa:16:3e:36:6d:35", "network": {"id": "55f3848c-4d4f-4c83-a3e6-bc7a6f7af3ce", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.152", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eb95d75934bc4912a35f709406a98a65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap901f3c6f-92", "ovs_interfaceid": "901f3c6f-920f-4eed-b3b6-2ba116322aae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1401.444334] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:36:6d:35', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea00b53a-9c9b-4592-ab95-7e10473f338d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '901f3c6f-920f-4eed-b3b6-2ba116322aae', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1401.456674] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Creating folder: Project (bf8ce6145b424cd88396c7b1f31e4498). Parent ref: group-v369214. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1401.457258] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-50182056-b01b-4a7c-b684-65b4f83e3890 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.476218] env[63379]: DEBUG oslo_vmware.api [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': task-1779000, 'name': PowerOnVM_Task, 'duration_secs': 1.120149} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.479967] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1401.479967] env[63379]: INFO nova.compute.manager [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Took 11.65 seconds to spawn the instance on the hypervisor. [ 1401.480199] env[63379]: DEBUG nova.compute.manager [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1401.480406] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Created folder: Project (bf8ce6145b424cd88396c7b1f31e4498) in parent group-v369214. [ 1401.480544] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Creating folder: Instances. Parent ref: group-v369260. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1401.481431] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa5d191c-d94f-4b76-bd75-420e8d5a61a2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.483902] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-af2307f3-4368-45e8-8e71-718818edbc5d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.497403] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Created folder: Instances in parent group-v369260. [ 1401.498026] env[63379]: DEBUG oslo.service.loopingcall [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1401.498026] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1401.498422] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-80658675-a6ca-4861-85dc-2c5172ef528d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.525656] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1401.525656] env[63379]: value = "task-1779005" [ 1401.525656] env[63379]: _type = "Task" [ 1401.525656] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.537774] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779005, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.577109] env[63379]: DEBUG oslo_vmware.api [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Task: {'id': task-1779002, 'name': ReconfigVM_Task, 'duration_secs': 0.641321} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.577368] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Reconfigured VM instance instance-0000000e to attach disk [datastore1] 25090d85-cd10-44fc-aa9d-071ada14f249/25090d85-cd10-44fc-aa9d-071ada14f249.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1401.578027] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cd069df7-a76d-454a-8ef2-51cf92b22c78 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.587897] env[63379]: DEBUG oslo_vmware.api [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Waiting for the task: (returnval){ [ 1401.587897] env[63379]: value = "task-1779006" [ 1401.587897] env[63379]: _type = "Task" [ 1401.587897] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.600856] env[63379]: DEBUG nova.compute.manager [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1401.622219] env[63379]: DEBUG oslo_vmware.api [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Task: {'id': task-1779006, 'name': Rename_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.695388] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c7ff48fb-4913-4494-becc-b77f512d1ef8 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.128s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1401.698142] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.939s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1401.699645] env[63379]: INFO nova.compute.claims [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1401.725511] env[63379]: INFO nova.scheduler.client.report [None req-c7ff48fb-4913-4494-becc-b77f512d1ef8 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Deleted allocations for instance 724c7a22-1833-4dc5-ab38-a11498a83ab8 [ 1401.831310] env[63379]: DEBUG nova.virt.hardware [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1401.831310] env[63379]: DEBUG nova.virt.hardware [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1401.831604] env[63379]: DEBUG nova.virt.hardware [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1401.831638] env[63379]: DEBUG nova.virt.hardware [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 
1401.831788] env[63379]: DEBUG nova.virt.hardware [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1401.831889] env[63379]: DEBUG nova.virt.hardware [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1401.835387] env[63379]: DEBUG nova.virt.hardware [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1401.835767] env[63379]: DEBUG nova.virt.hardware [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1401.836113] env[63379]: DEBUG nova.virt.hardware [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1401.836470] env[63379]: DEBUG nova.virt.hardware [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1401.836888] env[63379]: DEBUG nova.virt.hardware [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1401.838129] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7ffd4a5-2e7c-432a-8040-e6110114211d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.849782] env[63379]: DEBUG nova.compute.manager [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1401.850980] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85dd8679-3a98-455e-8797-def7f4289e06 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.860515] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ce66ae40-b889-4600-970e-01db61dd54bb tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Lock 
"30908171-e1b9-4e20-830e-419ff6d9a0fa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.050s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1401.865878] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-652ef837-7125-4521-8876-f49a8c9debfb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.014254] env[63379]: INFO nova.compute.manager [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Took 20.63 seconds to build instance. [ 1402.018292] env[63379]: DEBUG nova.network.neutron [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Successfully updated port: 6b70ec9a-65bb-4a1c-9312-97031fc4fc46 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1402.045949] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779005, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.103194] env[63379]: DEBUG oslo_vmware.api [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Task: {'id': task-1779006, 'name': Rename_Task, 'duration_secs': 0.229079} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.103605] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1402.103918] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f06a8948-f945-4d88-925f-f106f9cdfe85 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.107278] env[63379]: DEBUG oslo_concurrency.lockutils [None req-53229a5f-28ee-41dd-9ab7-b9bb941559d6 tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Acquiring lock "5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1402.107520] env[63379]: DEBUG oslo_concurrency.lockutils [None req-53229a5f-28ee-41dd-9ab7-b9bb941559d6 tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Lock "5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1402.107737] env[63379]: DEBUG oslo_concurrency.lockutils [None req-53229a5f-28ee-41dd-9ab7-b9bb941559d6 tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Acquiring lock "5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1402.107927] env[63379]: DEBUG oslo_concurrency.lockutils [None req-53229a5f-28ee-41dd-9ab7-b9bb941559d6 tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Lock "5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1402.108121] env[63379]: DEBUG oslo_concurrency.lockutils [None req-53229a5f-28ee-41dd-9ab7-b9bb941559d6 tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Lock "5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1402.110329] env[63379]: INFO nova.compute.manager [None req-53229a5f-28ee-41dd-9ab7-b9bb941559d6 tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Terminating instance [ 1402.112358] env[63379]: DEBUG nova.compute.manager [None req-53229a5f-28ee-41dd-9ab7-b9bb941559d6 
tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1402.112595] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-53229a5f-28ee-41dd-9ab7-b9bb941559d6 tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1402.113465] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e57ad6cc-2d39-46ef-836e-0dcd12efbe71 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.118515] env[63379]: DEBUG oslo_vmware.api [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Waiting for the task: (returnval){ [ 1402.118515] env[63379]: value = "task-1779007" [ 1402.118515] env[63379]: _type = "Task" [ 1402.118515] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.127056] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-53229a5f-28ee-41dd-9ab7-b9bb941559d6 tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1402.129205] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-85ed2be4-44d1-4567-b180-cd351ae223fe {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.141264] env[63379]: DEBUG oslo_vmware.api [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Task: {'id': task-1779007, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.141264] env[63379]: DEBUG oslo_vmware.api [None req-53229a5f-28ee-41dd-9ab7-b9bb941559d6 tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Waiting for the task: (returnval){ [ 1402.141264] env[63379]: value = "task-1779008" [ 1402.141264] env[63379]: _type = "Task" [ 1402.141264] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.151873] env[63379]: DEBUG oslo_vmware.api [None req-53229a5f-28ee-41dd-9ab7-b9bb941559d6 tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Task: {'id': task-1779008, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.236455] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c7ff48fb-4913-4494-becc-b77f512d1ef8 tempest-ServerDiagnosticsTest-1790319842 tempest-ServerDiagnosticsTest-1790319842-project-member] Lock "724c7a22-1833-4dc5-ab38-a11498a83ab8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.334s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1402.359689] env[63379]: DEBUG nova.network.neutron [req-e632a5e7-e1af-4120-8106-7e60ef406c1e req-0e169a6f-8805-404b-b75d-163abefb1505 service nova] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Updated VIF entry in instance network info cache for port 775d733a-ad5b-4b39-aa69-8b4a577c4348. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1402.360163] env[63379]: DEBUG nova.network.neutron [req-e632a5e7-e1af-4120-8106-7e60ef406c1e req-0e169a6f-8805-404b-b75d-163abefb1505 service nova] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Updating instance_info_cache with network_info: [{"id": "775d733a-ad5b-4b39-aa69-8b4a577c4348", "address": "fa:16:3e:a6:1c:0d", "network": {"id": "199703e5-fc18-408d-a14c-e309d6e3fb78", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1056046585-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "565c1cd0648646e19d73a6b1546ba28f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4d548e7-d762-406a-bb2d-dc7168a8ca67", "external-id": "nsx-vlan-transportzone-796", "segmentation_id": 796, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap775d733a-ad", "ovs_interfaceid": "775d733a-ad5b-4b39-aa69-8b4a577c4348", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1402.369934] env[63379]: DEBUG nova.compute.manager [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Starting instance... 
{{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1402.379478] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1402.380836] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eaa9909d-db64-4ac2-827a-f938eeb9c767 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.391359] env[63379]: DEBUG oslo_vmware.api [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Waiting for the task: (returnval){ [ 1402.391359] env[63379]: value = "task-1779009" [ 1402.391359] env[63379]: _type = "Task" [ 1402.391359] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.404832] env[63379]: DEBUG oslo_vmware.api [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Task: {'id': task-1779009, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.521784] env[63379]: DEBUG oslo_concurrency.lockutils [None req-36a9d9af-5fc3-47be-8ad6-ab3883fa3434 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Lock "bf0dd3cf-684c-4378-a89c-5b9f16df062d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.746s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1402.522154] env[63379]: DEBUG oslo_concurrency.lockutils [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Acquiring lock "refresh_cache-15d19ce3-ea71-47ff-a738-9ba00b8dfcf1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1402.522301] env[63379]: DEBUG oslo_concurrency.lockutils [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Acquired lock "refresh_cache-15d19ce3-ea71-47ff-a738-9ba00b8dfcf1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1402.522455] env[63379]: DEBUG nova.network.neutron [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1402.542393] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779005, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.631014] env[63379]: DEBUG oslo_vmware.api [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Task: {'id': task-1779007, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.654524] env[63379]: DEBUG oslo_vmware.api [None req-53229a5f-28ee-41dd-9ab7-b9bb941559d6 tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Task: {'id': task-1779008, 'name': PowerOffVM_Task, 'duration_secs': 0.34953} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.655058] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-53229a5f-28ee-41dd-9ab7-b9bb941559d6 tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1402.655231] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-53229a5f-28ee-41dd-9ab7-b9bb941559d6 tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1402.655998] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d45867d4-69b6-4a51-b05d-dea7110a591f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.868659] env[63379]: DEBUG oslo_concurrency.lockutils [req-e632a5e7-e1af-4120-8106-7e60ef406c1e req-0e169a6f-8805-404b-b75d-163abefb1505 service nova] Releasing lock "refresh_cache-d47be684-6cd8-45c6-8c6a-9a6db0390f97" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1402.869083] env[63379]: DEBUG nova.compute.manager [req-e632a5e7-e1af-4120-8106-7e60ef406c1e req-0e169a6f-8805-404b-b75d-163abefb1505 service nova] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Received event network-vif-plugged-901f3c6f-920f-4eed-b3b6-2ba116322aae {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1402.871923] env[63379]: DEBUG oslo_concurrency.lockutils [req-e632a5e7-e1af-4120-8106-7e60ef406c1e req-0e169a6f-8805-404b-b75d-163abefb1505 service nova] Acquiring lock "aaaf4b06-ef84-41ba-8054-29582854a9f1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1402.871923] env[63379]: DEBUG oslo_concurrency.lockutils [req-e632a5e7-e1af-4120-8106-7e60ef406c1e req-0e169a6f-8805-404b-b75d-163abefb1505 service nova] Lock "aaaf4b06-ef84-41ba-8054-29582854a9f1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1402.871923] env[63379]: DEBUG oslo_concurrency.lockutils [req-e632a5e7-e1af-4120-8106-7e60ef406c1e req-0e169a6f-8805-404b-b75d-163abefb1505 
service nova] Lock "aaaf4b06-ef84-41ba-8054-29582854a9f1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1402.871923] env[63379]: DEBUG nova.compute.manager [req-e632a5e7-e1af-4120-8106-7e60ef406c1e req-0e169a6f-8805-404b-b75d-163abefb1505 service nova] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] No waiting events found dispatching network-vif-plugged-901f3c6f-920f-4eed-b3b6-2ba116322aae {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1402.871923] env[63379]: WARNING nova.compute.manager [req-e632a5e7-e1af-4120-8106-7e60ef406c1e req-0e169a6f-8805-404b-b75d-163abefb1505 service nova] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Received unexpected event network-vif-plugged-901f3c6f-920f-4eed-b3b6-2ba116322aae for instance with vm_state building and task_state spawning. [ 1402.871923] env[63379]: DEBUG nova.compute.manager [req-e632a5e7-e1af-4120-8106-7e60ef406c1e req-0e169a6f-8805-404b-b75d-163abefb1505 service nova] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Received event network-changed-901f3c6f-920f-4eed-b3b6-2ba116322aae {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1402.871923] env[63379]: DEBUG nova.compute.manager [req-e632a5e7-e1af-4120-8106-7e60ef406c1e req-0e169a6f-8805-404b-b75d-163abefb1505 service nova] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Refreshing instance network info cache due to event network-changed-901f3c6f-920f-4eed-b3b6-2ba116322aae. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1402.871923] env[63379]: DEBUG oslo_concurrency.lockutils [req-e632a5e7-e1af-4120-8106-7e60ef406c1e req-0e169a6f-8805-404b-b75d-163abefb1505 service nova] Acquiring lock "refresh_cache-aaaf4b06-ef84-41ba-8054-29582854a9f1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1402.871923] env[63379]: DEBUG oslo_concurrency.lockutils [req-e632a5e7-e1af-4120-8106-7e60ef406c1e req-0e169a6f-8805-404b-b75d-163abefb1505 service nova] Acquired lock "refresh_cache-aaaf4b06-ef84-41ba-8054-29582854a9f1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1402.871923] env[63379]: DEBUG nova.network.neutron [req-e632a5e7-e1af-4120-8106-7e60ef406c1e req-0e169a6f-8805-404b-b75d-163abefb1505 service nova] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Refreshing network info cache for port 901f3c6f-920f-4eed-b3b6-2ba116322aae {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1402.899358] env[63379]: DEBUG oslo_concurrency.lockutils [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1402.905755] env[63379]: DEBUG oslo_vmware.api [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Task: {'id': task-1779009, 'name': PowerOffVM_Task, 'duration_secs': 0.315037} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.906807] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1402.907552] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1402.908060] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d3806d7-9c47-4d31-bf7a-633ca6bc0024 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.917966] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1402.918320] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ef60c986-a93b-4e2d-ad7f-4cb43aded97a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.032589] env[63379]: DEBUG nova.compute.manager [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1403.045626] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779005, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.125603] env[63379]: DEBUG nova.network.neutron [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1403.137990] env[63379]: DEBUG oslo_vmware.api [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Task: {'id': task-1779007, 'name': PowerOnVM_Task, 'duration_secs': 0.582494} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.139562] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1403.139562] env[63379]: INFO nova.compute.manager [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Took 10.68 seconds to spawn the instance on the hypervisor. [ 1403.139562] env[63379]: DEBUG nova.compute.manager [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1403.139861] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b875ae95-c6c7-4273-b6aa-b0491c4050d9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.145991] env[63379]: DEBUG nova.compute.manager [req-ed64c804-134a-48b6-b1c9-9635da1d0d38 req-38e3d3a0-62f0-4190-93d6-a6c177ca9844 service nova] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Received event network-vif-plugged-6b70ec9a-65bb-4a1c-9312-97031fc4fc46 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1403.146237] env[63379]: DEBUG oslo_concurrency.lockutils [req-ed64c804-134a-48b6-b1c9-9635da1d0d38 req-38e3d3a0-62f0-4190-93d6-a6c177ca9844 service nova] Acquiring lock "15d19ce3-ea71-47ff-a738-9ba00b8dfcf1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1403.146456] env[63379]: DEBUG oslo_concurrency.lockutils [req-ed64c804-134a-48b6-b1c9-9635da1d0d38 req-38e3d3a0-62f0-4190-93d6-a6c177ca9844 service nova] Lock "15d19ce3-ea71-47ff-a738-9ba00b8dfcf1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1403.146789] env[63379]: DEBUG oslo_concurrency.lockutils [req-ed64c804-134a-48b6-b1c9-9635da1d0d38 req-38e3d3a0-62f0-4190-93d6-a6c177ca9844 service nova] Lock "15d19ce3-ea71-47ff-a738-9ba00b8dfcf1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1403.147325] env[63379]: DEBUG nova.compute.manager [req-ed64c804-134a-48b6-b1c9-9635da1d0d38 req-38e3d3a0-62f0-4190-93d6-a6c177ca9844 service nova] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] No waiting events found dispatching network-vif-plugged-6b70ec9a-65bb-4a1c-9312-97031fc4fc46 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1403.147325] env[63379]: WARNING nova.compute.manager [req-ed64c804-134a-48b6-b1c9-9635da1d0d38 
req-38e3d3a0-62f0-4190-93d6-a6c177ca9844 service nova] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Received unexpected event network-vif-plugged-6b70ec9a-65bb-4a1c-9312-97031fc4fc46 for instance with vm_state building and task_state spawning. [ 1403.164901] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ad1a762-0478-4ba9-b454-55aa368f799c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.178492] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d81e1cd7-69aa-45ab-98d5-cbc0a1c5ecac {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.214318] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a973c1f-1a17-4190-9b97-3c2248e81acb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.225346] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5751ddd3-f06e-401e-8a7e-805d3468b812 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.243447] env[63379]: DEBUG nova.compute.provider_tree [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1403.505799] env[63379]: DEBUG nova.network.neutron [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Updating instance_info_cache with network_info: [{"id": "6b70ec9a-65bb-4a1c-9312-97031fc4fc46", "address": "fa:16:3e:8f:e4:09", "network": {"id": "5cdac896-d067-4c9e-9ccd-954fce726e11", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-637047934-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e0e95e0ca1804616a5d258396749d295", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e", "external-id": "nsx-vlan-transportzone-988", "segmentation_id": 988, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b70ec9a-65", "ovs_interfaceid": "6b70ec9a-65bb-4a1c-9312-97031fc4fc46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1403.559054] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779005, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.570181] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1403.574552] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1403.574876] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1403.575180] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Deleting the datastore file [datastore1] a6f7c217-a493-403d-b776-870df4575f2a {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1403.578337] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-30535a2d-9270-4f55-84ba-9dbbde081415 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.580263] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-53229a5f-28ee-41dd-9ab7-b9bb941559d6 tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1403.580483] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-53229a5f-28ee-41dd-9ab7-b9bb941559d6 tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1403.580667] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-53229a5f-28ee-41dd-9ab7-b9bb941559d6 tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Deleting the datastore file [datastore1] 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1403.580950] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9d299d42-2df4-48f6-9b70-6d388c7500d8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.590373] env[63379]: DEBUG oslo_vmware.api [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Waiting for the task: (returnval){ [ 1403.590373] 
env[63379]: value = "task-1779012" [ 1403.590373] env[63379]: _type = "Task" [ 1403.590373] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.592194] env[63379]: DEBUG oslo_vmware.api [None req-53229a5f-28ee-41dd-9ab7-b9bb941559d6 tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Waiting for the task: (returnval){ [ 1403.592194] env[63379]: value = "task-1779013" [ 1403.592194] env[63379]: _type = "Task" [ 1403.592194] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.607441] env[63379]: DEBUG oslo_vmware.api [None req-53229a5f-28ee-41dd-9ab7-b9bb941559d6 tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Task: {'id': task-1779013, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.612016] env[63379]: DEBUG oslo_vmware.api [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Task: {'id': task-1779012, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.670790] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Acquiring lock "76731b1b-af66-441b-8fe4-d5d7e7faf3ca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1403.671089] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Lock "76731b1b-af66-441b-8fe4-d5d7e7faf3ca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1403.677044] env[63379]: INFO nova.compute.manager [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Took 21.71 seconds to build instance. 
[ 1403.748868] env[63379]: DEBUG nova.scheduler.client.report [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1403.817613] env[63379]: DEBUG nova.network.neutron [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Successfully updated port: 5bb23315-a5dc-438f-bb8e-fc90360f23ec {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1403.923283] env[63379]: DEBUG nova.network.neutron [req-e632a5e7-e1af-4120-8106-7e60ef406c1e req-0e169a6f-8805-404b-b75d-163abefb1505 service nova] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Updated VIF entry in instance network info cache for port 901f3c6f-920f-4eed-b3b6-2ba116322aae. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1403.923688] env[63379]: DEBUG nova.network.neutron [req-e632a5e7-e1af-4120-8106-7e60ef406c1e req-0e169a6f-8805-404b-b75d-163abefb1505 service nova] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Updating instance_info_cache with network_info: [{"id": "901f3c6f-920f-4eed-b3b6-2ba116322aae", "address": "fa:16:3e:36:6d:35", "network": {"id": "55f3848c-4d4f-4c83-a3e6-bc7a6f7af3ce", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.152", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eb95d75934bc4912a35f709406a98a65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap901f3c6f-92", "ovs_interfaceid": "901f3c6f-920f-4eed-b3b6-2ba116322aae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1404.009750] env[63379]: DEBUG oslo_concurrency.lockutils [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Releasing lock "refresh_cache-15d19ce3-ea71-47ff-a738-9ba00b8dfcf1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1404.010184] env[63379]: DEBUG nova.compute.manager [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 
tempest-ServersTestBootFromVolume-1966638433-project-member] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Instance network_info: |[{"id": "6b70ec9a-65bb-4a1c-9312-97031fc4fc46", "address": "fa:16:3e:8f:e4:09", "network": {"id": "5cdac896-d067-4c9e-9ccd-954fce726e11", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-637047934-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e0e95e0ca1804616a5d258396749d295", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e", "external-id": "nsx-vlan-transportzone-988", "segmentation_id": 988, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b70ec9a-65", "ovs_interfaceid": "6b70ec9a-65bb-4a1c-9312-97031fc4fc46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1404.010975] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8f:e4:09', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6b70ec9a-65bb-4a1c-9312-97031fc4fc46', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1404.019974] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Creating folder: Project (e0e95e0ca1804616a5d258396749d295). Parent ref: group-v369214. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1404.019974] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b4da1f1b-5024-4882-a45c-7b42460576ff {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.039019] env[63379]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1404.039019] env[63379]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=63379) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1404.039019] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Folder already exists: Project (e0e95e0ca1804616a5d258396749d295). Parent ref: group-v369214. 
{{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1404.039019] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Creating folder: Instances. Parent ref: group-v369215. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1404.042706] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-598228f2-59e6-4dec-a6d3-07d8d875d30f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.052871] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779005, 'name': CreateVM_Task, 'duration_secs': 2.106784} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.053327] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1404.054337] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1404.054734] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1404.055209] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1404.057764] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a4e04c2-fc37-46a0-abb7-70e7fab3782d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.061139] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Created folder: Instances in parent group-v369215. [ 1404.061139] env[63379]: DEBUG oslo.service.loopingcall [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1404.061139] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1404.061139] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8bd69750-48ac-4da3-9058-1bac12311536 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.089412] env[63379]: DEBUG oslo_vmware.api [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Waiting for the task: (returnval){ [ 1404.089412] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52fa0ee5-5012-e1fa-8f82-3a50f07c79fd" [ 1404.089412] env[63379]: _type = "Task" [ 1404.089412] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.100625] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1404.100625] env[63379]: value = "task-1779016" [ 1404.100625] env[63379]: _type = "Task" [ 1404.100625] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.114706] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779016, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.123016] env[63379]: DEBUG oslo_vmware.api [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Task: {'id': task-1779012, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.123438] env[63379]: DEBUG oslo_vmware.api [None req-53229a5f-28ee-41dd-9ab7-b9bb941559d6 tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Task: {'id': task-1779013, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.127431] env[63379]: DEBUG oslo_vmware.api [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52fa0ee5-5012-e1fa-8f82-3a50f07c79fd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.180865] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bde0b6b8-2878-456d-94ee-ebdddc12a24a tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Lock "25090d85-cd10-44fc-aa9d-071ada14f249" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.760s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1404.253962] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.556s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1404.254557] env[63379]: DEBUG nova.compute.manager [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1404.257244] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0dfd20b4-6999-49b9-b231-d7c89dc8ea24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.381s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1404.257495] env[63379]: DEBUG nova.objects.instance [None req-0dfd20b4-6999-49b9-b231-d7c89dc8ea24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Lazy-loading 'resources' on Instance uuid c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1404.320770] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquiring lock "refresh_cache-571bb238-9cf3-475e-b596-a9609acc8696" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1404.320996] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquired lock "refresh_cache-571bb238-9cf3-475e-b596-a9609acc8696" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1404.321127] env[63379]: DEBUG nova.network.neutron [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1404.430520] env[63379]: DEBUG oslo_concurrency.lockutils [req-e632a5e7-e1af-4120-8106-7e60ef406c1e req-0e169a6f-8805-404b-b75d-163abefb1505 service nova] Releasing lock 
"refresh_cache-aaaf4b06-ef84-41ba-8054-29582854a9f1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1404.612027] env[63379]: DEBUG oslo_vmware.api [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Task: {'id': task-1779012, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.612329] env[63379]: DEBUG oslo_vmware.api [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52fa0ee5-5012-e1fa-8f82-3a50f07c79fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.622815] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779016, 'name': CreateVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.630235] env[63379]: DEBUG oslo_vmware.api [None req-53229a5f-28ee-41dd-9ab7-b9bb941559d6 tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Task: {'id': task-1779013, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.683906] env[63379]: DEBUG nova.compute.manager [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1404.764535] env[63379]: DEBUG nova.compute.utils [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1404.769524] env[63379]: DEBUG nova.compute.manager [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1404.769729] env[63379]: DEBUG nova.network.neutron [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1404.833077] env[63379]: DEBUG nova.compute.manager [req-7a9e981f-9593-43e9-8512-50763f924504 req-6116ea89-ae3e-4a64-aa11-5c021ee78d7b service nova] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Received event network-vif-plugged-5bb23315-a5dc-438f-bb8e-fc90360f23ec {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1404.833485] env[63379]: DEBUG oslo_concurrency.lockutils [req-7a9e981f-9593-43e9-8512-50763f924504 req-6116ea89-ae3e-4a64-aa11-5c021ee78d7b service nova] Acquiring lock "571bb238-9cf3-475e-b596-a9609acc8696-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1404.833575] env[63379]: DEBUG oslo_concurrency.lockutils [req-7a9e981f-9593-43e9-8512-50763f924504 req-6116ea89-ae3e-4a64-aa11-5c021ee78d7b service nova] Lock "571bb238-9cf3-475e-b596-a9609acc8696-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1404.833747] env[63379]: DEBUG oslo_concurrency.lockutils [req-7a9e981f-9593-43e9-8512-50763f924504 req-6116ea89-ae3e-4a64-aa11-5c021ee78d7b service nova] Lock "571bb238-9cf3-475e-b596-a9609acc8696-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1404.833922] env[63379]: DEBUG nova.compute.manager [req-7a9e981f-9593-43e9-8512-50763f924504 req-6116ea89-ae3e-4a64-aa11-5c021ee78d7b service nova] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] No waiting events found dispatching network-vif-plugged-5bb23315-a5dc-438f-bb8e-fc90360f23ec {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1404.834135] env[63379]: WARNING nova.compute.manager [req-7a9e981f-9593-43e9-8512-50763f924504 req-6116ea89-ae3e-4a64-aa11-5c021ee78d7b service nova] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Received unexpected event network-vif-plugged-5bb23315-a5dc-438f-bb8e-fc90360f23ec for instance with vm_state building and task_state spawning. 
[ 1404.878771] env[63379]: DEBUG nova.policy [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'db6bd541e63b47e29e5c02fc02f162c8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dce3d2c2429642ee92f4bb7e53b0a128', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1404.889810] env[63379]: DEBUG nova.network.neutron [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1405.116211] env[63379]: DEBUG oslo_vmware.api [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52fa0ee5-5012-e1fa-8f82-3a50f07c79fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.123988] env[63379]: DEBUG oslo_vmware.api [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Task: {'id': task-1779012, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.128325] env[63379]: DEBUG oslo_vmware.api [None req-53229a5f-28ee-41dd-9ab7-b9bb941559d6 tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Task: {'id': task-1779013, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.136311] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779016, 'name': CreateVM_Task} progress is 15%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.226191] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1405.257699] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b46e2948-ae9f-41f9-bea5-a199f7a68d26 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.268828] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc0599fd-37b5-46fd-afb2-6710611fb90e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.275480] env[63379]: DEBUG nova.compute.manager [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1405.309678] env[63379]: DEBUG nova.network.neutron [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Updating instance_info_cache with network_info: [{"id": "5bb23315-a5dc-438f-bb8e-fc90360f23ec", "address": "fa:16:3e:de:b8:cd", "network": {"id": "4d473417-d5ca-4b8c-bb54-df6a37cef0bf", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-2006058150-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dce3d2c2429642ee92f4bb7e53b0a128", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61a172ee-af3f-473e-b12a-3fee5bf39c8d", "external-id": "nsx-vlan-transportzone-997", "segmentation_id": 997, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5bb23315-a5", "ovs_interfaceid": "5bb23315-a5dc-438f-bb8e-fc90360f23ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1405.311476] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ced0bac3-b89c-49d7-b416-9a9530504f77 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.321274] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b49f747-c790-4cbb-987b-414c8c1502d3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1405.340976] env[63379]: DEBUG nova.compute.provider_tree [None req-0dfd20b4-6999-49b9-b231-d7c89dc8ea24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1405.439791] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5909a1dd-7f47-4cbc-b1bc-f5b57ad661d1 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Acquiring lock "30908171-e1b9-4e20-830e-419ff6d9a0fa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1405.440214] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5909a1dd-7f47-4cbc-b1bc-f5b57ad661d1 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Lock "30908171-e1b9-4e20-830e-419ff6d9a0fa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1405.440501] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5909a1dd-7f47-4cbc-b1bc-f5b57ad661d1 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Acquiring lock "30908171-e1b9-4e20-830e-419ff6d9a0fa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1405.440761] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5909a1dd-7f47-4cbc-b1bc-f5b57ad661d1 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Lock "30908171-e1b9-4e20-830e-419ff6d9a0fa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1405.445175] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5909a1dd-7f47-4cbc-b1bc-f5b57ad661d1 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Lock "30908171-e1b9-4e20-830e-419ff6d9a0fa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1405.448032] env[63379]: INFO nova.compute.manager [None req-5909a1dd-7f47-4cbc-b1bc-f5b57ad661d1 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Terminating instance [ 1405.454135] env[63379]: DEBUG nova.compute.manager [None req-5909a1dd-7f47-4cbc-b1bc-f5b57ad661d1 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1405.454445] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-5909a1dd-7f47-4cbc-b1bc-f5b57ad661d1 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1405.458289] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-759dacb9-b658-4896-a9f9-836ac9288795 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.467050] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-5909a1dd-7f47-4cbc-b1bc-f5b57ad661d1 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1405.467050] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7e47d432-32a8-499c-b547-082bdcde012c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.475348] env[63379]: DEBUG oslo_vmware.api [None req-5909a1dd-7f47-4cbc-b1bc-f5b57ad661d1 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Waiting for the task: (returnval){ [ 1405.475348] env[63379]: value = "task-1779017" [ 1405.475348] env[63379]: _type = "Task" [ 1405.475348] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.487818] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Acquiring lock "d2f5b406-3d0e-4150-aeaf-7cdacbc12c06" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1405.489118] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Lock "d2f5b406-3d0e-4150-aeaf-7cdacbc12c06" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1405.496453] env[63379]: DEBUG oslo_vmware.api [None req-5909a1dd-7f47-4cbc-b1bc-f5b57ad661d1 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Task: {'id': task-1779017, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.636603] env[63379]: DEBUG oslo_vmware.api [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52fa0ee5-5012-e1fa-8f82-3a50f07c79fd, 'name': SearchDatastore_Task, 'duration_secs': 1.061879} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.644962] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1405.645534] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1405.645803] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1405.648951] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1405.648951] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1405.648951] env[63379]: DEBUG oslo_vmware.api [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Task: {'id': task-1779012, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.683419} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.648951] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2003dd4b-caf7-4d05-a319-4a6e328a61c0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.649251] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1405.649486] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1405.649637] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1405.657223] env[63379]: DEBUG oslo_vmware.api [None req-53229a5f-28ee-41dd-9ab7-b9bb941559d6 tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Task: {'id': task-1779013, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.75049} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.658132] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-53229a5f-28ee-41dd-9ab7-b9bb941559d6 tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1405.658407] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-53229a5f-28ee-41dd-9ab7-b9bb941559d6 tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1405.658648] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-53229a5f-28ee-41dd-9ab7-b9bb941559d6 tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1405.658914] env[63379]: INFO nova.compute.manager [None req-53229a5f-28ee-41dd-9ab7-b9bb941559d6 tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Took 3.55 seconds to destroy the instance on the hypervisor. 
[ 1405.659339] env[63379]: DEBUG oslo.service.loopingcall [None req-53229a5f-28ee-41dd-9ab7-b9bb941559d6 tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1405.663158] env[63379]: DEBUG nova.compute.manager [-] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1405.663290] env[63379]: DEBUG nova.network.neutron [-] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1405.665113] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779016, 'name': CreateVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.667067] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1405.667067] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1405.667908] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6573ae4b-b2ea-410c-b4d6-97be9cd8b6e4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.676075] env[63379]: DEBUG oslo_vmware.api [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Waiting for the task: (returnval){ [ 1405.676075] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5278ace6-1a33-cc71-a3c3-54544b7b1a02" [ 1405.676075] env[63379]: _type = "Task" [ 1405.676075] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.688227] env[63379]: DEBUG oslo_vmware.api [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5278ace6-1a33-cc71-a3c3-54544b7b1a02, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.820059] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Releasing lock "refresh_cache-571bb238-9cf3-475e-b596-a9609acc8696" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1405.820059] env[63379]: DEBUG nova.compute.manager [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Instance network_info: |[{"id": "5bb23315-a5dc-438f-bb8e-fc90360f23ec", "address": "fa:16:3e:de:b8:cd", "network": {"id": "4d473417-d5ca-4b8c-bb54-df6a37cef0bf", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-2006058150-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dce3d2c2429642ee92f4bb7e53b0a128", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61a172ee-af3f-473e-b12a-3fee5bf39c8d", "external-id": "nsx-vlan-transportzone-997", "segmentation_id": 997, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5bb23315-a5", "ovs_interfaceid": "5bb23315-a5dc-438f-bb8e-fc90360f23ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1405.820059] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:de:b8:cd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '61a172ee-af3f-473e-b12a-3fee5bf39c8d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5bb23315-a5dc-438f-bb8e-fc90360f23ec', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1405.832015] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Creating folder: Project (dce3d2c2429642ee92f4bb7e53b0a128). Parent ref: group-v369214. 
{{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1405.832298] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-73dd65f5-3295-461b-a9aa-9bb77309a5dc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.838032] env[63379]: DEBUG nova.network.neutron [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Successfully created port: 72ec59a1-3694-48aa-884a-9e0b1cebb603 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1405.850655] env[63379]: DEBUG nova.scheduler.client.report [None req-0dfd20b4-6999-49b9-b231-d7c89dc8ea24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1405.855389] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Created folder: Project (dce3d2c2429642ee92f4bb7e53b0a128) in parent group-v369214. [ 1405.855389] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Creating folder: Instances. Parent ref: group-v369265. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1405.855389] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d3ac1f61-c242-4622-b75f-07e5758b58f9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.872583] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Created folder: Instances in parent group-v369265. [ 1405.872583] env[63379]: DEBUG oslo.service.loopingcall [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1405.872583] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1405.872583] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-84b0b12e-275c-4683-a4b5-afb9ac0ab37a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.892331] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1405.892331] env[63379]: value = "task-1779020" [ 1405.892331] env[63379]: _type = "Task" [ 1405.892331] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.904903] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779020, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.988284] env[63379]: DEBUG oslo_vmware.api [None req-5909a1dd-7f47-4cbc-b1bc-f5b57ad661d1 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Task: {'id': task-1779017, 'name': PowerOffVM_Task, 'duration_secs': 0.287102} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.988664] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-5909a1dd-7f47-4cbc-b1bc-f5b57ad661d1 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1405.988856] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-5909a1dd-7f47-4cbc-b1bc-f5b57ad661d1 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1405.989144] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2fc6f888-50d3-4ac0-b95c-3ba9ff8e28ed {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.143745] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779016, 'name': CreateVM_Task, 'duration_secs': 1.65623} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1406.144015] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1406.145309] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'guest_format': None, 'device_type': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369228', 'volume_id': '263e34b9-3753-4240-8bd6-67c4019d79ae', 'name': 'volume-263e34b9-3753-4240-8bd6-67c4019d79ae', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '15d19ce3-ea71-47ff-a738-9ba00b8dfcf1', 'attached_at': '', 'detached_at': '', 'volume_id': '263e34b9-3753-4240-8bd6-67c4019d79ae', 'serial': '263e34b9-3753-4240-8bd6-67c4019d79ae'}, 'attachment_id': '98e0b208-b121-4c58-a878-a32d4486a20b', 'boot_index': 0, 'mount_device': '/dev/sda', 'disk_bus': None, 'delete_on_termination': True, 'volume_type': None}], 'swap': None} {{(pid=63379) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1406.145309] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Root volume attach. 
Driver type: vmdk {{(pid=63379) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1406.145783] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5735fecb-6541-4b69-b4b6-d4bf712ad3d7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.158169] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2ea2295-1c6b-4159-949a-025b31270c07 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.170300] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b4aee2d-76de-4a2c-b8f3-ab2a2af31f2e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.186201] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-5909a1dd-7f47-4cbc-b1bc-f5b57ad661d1 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1406.188072] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-5909a1dd-7f47-4cbc-b1bc-f5b57ad661d1 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1406.188072] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-5909a1dd-7f47-4cbc-b1bc-f5b57ad661d1 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Deleting the datastore file [datastore1] 30908171-e1b9-4e20-830e-419ff6d9a0fa {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1406.188072] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-a6c6d61d-984e-481a-8632-e81a66d41a01 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.190575] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ada46f58-e437-4d6d-9843-475b6c7ef0bd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.200019] env[63379]: DEBUG oslo_vmware.api [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5278ace6-1a33-cc71-a3c3-54544b7b1a02, 'name': SearchDatastore_Task, 'duration_secs': 0.020344} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1406.200019] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-726a3d61-897b-46ea-a259-cefb7bd543c8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.204225] env[63379]: DEBUG oslo_vmware.api [None req-5909a1dd-7f47-4cbc-b1bc-f5b57ad661d1 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Waiting for the task: (returnval){ [ 1406.204225] env[63379]: value = "task-1779022" [ 1406.204225] env[63379]: _type = "Task" [ 1406.204225] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.211091] env[63379]: DEBUG oslo_vmware.api [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Waiting for the task: (returnval){ [ 1406.211091] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52cee0db-b7d8-b244-f02c-f2a9befa6326" [ 1406.211091] env[63379]: _type = "Task" [ 1406.211091] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.211324] env[63379]: DEBUG oslo_vmware.api [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Waiting for the task: (returnval){ [ 1406.211324] env[63379]: value = "task-1779023" [ 1406.211324] env[63379]: _type = "Task" [ 1406.211324] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.222395] env[63379]: DEBUG oslo_vmware.api [None req-5909a1dd-7f47-4cbc-b1bc-f5b57ad661d1 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Task: {'id': task-1779022, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.236048] env[63379]: DEBUG oslo_vmware.api [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52cee0db-b7d8-b244-f02c-f2a9befa6326, 'name': SearchDatastore_Task, 'duration_secs': 0.017614} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1406.236365] env[63379]: DEBUG oslo_vmware.api [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Task: {'id': task-1779023, 'name': RelocateVM_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.236682] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1406.236978] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] aaaf4b06-ef84-41ba-8054-29582854a9f1/aaaf4b06-ef84-41ba-8054-29582854a9f1.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1406.237517] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7e939b3c-38eb-4c8a-bc0c-04097ed1baac {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.247429] env[63379]: DEBUG oslo_vmware.api [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Waiting for the task: (returnval){ [ 1406.247429] env[63379]: value = "task-1779024" [ 1406.247429] env[63379]: _type = "Task" [ 1406.247429] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.258298] env[63379]: DEBUG oslo_vmware.api [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Task: {'id': task-1779024, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.291447] env[63379]: DEBUG nova.compute.manager [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1406.329161] env[63379]: DEBUG nova.virt.hardware [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1406.329405] env[63379]: DEBUG nova.virt.hardware [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1406.329494] env[63379]: DEBUG nova.virt.hardware [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1406.329772] env[63379]: DEBUG nova.virt.hardware [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1406.330692] env[63379]: DEBUG nova.virt.hardware [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1406.330692] env[63379]: DEBUG nova.virt.hardware [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1406.330692] env[63379]: DEBUG nova.virt.hardware [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1406.330692] env[63379]: DEBUG nova.virt.hardware [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1406.330692] env[63379]: DEBUG 
nova.virt.hardware [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1406.330946] env[63379]: DEBUG nova.virt.hardware [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1406.330991] env[63379]: DEBUG nova.virt.hardware [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1406.332415] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6de331d9-b68c-48ff-be69-63c264f2d263 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.343709] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c17a796-366c-4626-b338-ced651c22952 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.362288] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0dfd20b4-6999-49b9-b231-d7c89dc8ea24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.105s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1406.365312] env[63379]: DEBUG oslo_concurrency.lockutils [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.745s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1406.367093] env[63379]: INFO nova.compute.claims [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1406.399324] env[63379]: INFO nova.scheduler.client.report [None req-0dfd20b4-6999-49b9-b231-d7c89dc8ea24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Deleted allocations for instance c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c [ 1406.406046] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779020, 'name': CreateVM_Task, 'duration_secs': 0.496991} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1406.408565] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1406.408814] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1406.409022] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1406.410092] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1406.410092] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dcbc1dbc-97ef-4caa-a1f5-dab275cfbf93 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.417700] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for the task: (returnval){ [ 1406.417700] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52980d4b-b907-7043-b658-e1477f9beefc" [ 1406.417700] env[63379]: _type = "Task" [ 1406.417700] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.428081] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52980d4b-b907-7043-b658-e1477f9beefc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.699708] env[63379]: DEBUG nova.virt.hardware [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1406.700557] env[63379]: DEBUG nova.virt.hardware [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1406.700821] env[63379]: DEBUG nova.virt.hardware [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1406.701363] env[63379]: DEBUG nova.virt.hardware [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1406.701565] env[63379]: DEBUG nova.virt.hardware [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1406.701782] env[63379]: DEBUG nova.virt.hardware [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1406.702076] env[63379]: DEBUG nova.virt.hardware [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1406.702490] env[63379]: DEBUG nova.virt.hardware [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1406.702754] env[63379]: DEBUG nova.virt.hardware [None 
req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1406.703052] env[63379]: DEBUG nova.virt.hardware [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1406.703129] env[63379]: DEBUG nova.virt.hardware [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1406.704409] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03102d11-0aea-4d8d-8338-caaf1ef1f137 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.726947] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3207fbac-1768-40a7-9ce7-07ef83953d68 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.735217] env[63379]: DEBUG oslo_vmware.api [None req-5909a1dd-7f47-4cbc-b1bc-f5b57ad661d1 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Task: {'id': task-1779022, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.328359} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1406.739564] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-5909a1dd-7f47-4cbc-b1bc-f5b57ad661d1 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1406.740583] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-5909a1dd-7f47-4cbc-b1bc-f5b57ad661d1 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1406.740583] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-5909a1dd-7f47-4cbc-b1bc-f5b57ad661d1 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1406.740836] env[63379]: INFO nova.compute.manager [None req-5909a1dd-7f47-4cbc-b1bc-f5b57ad661d1 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Took 1.29 seconds to destroy the instance on the hypervisor. 
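
The SearchDatastore_Task and DeleteDatastoreFile_Task entries above all follow the same oslo.vmware pattern: a SOAP call returns a task reference, wait_for_task() polls it (the "progress is N%" lines come from _poll_task), and the task info is returned once the task completes successfully. Below is a minimal, illustrative sketch of that cycle; the session arguments and the datastore-browser reference are placeholders and assumptions, not values taken from this log.

    # Illustrative sketch only: the invoke/wait cycle behind the
    # wait_for_task / _poll_task DEBUG lines above (oslo.vmware).
    # Nothing here is taken from this log beyond the operation name.
    from oslo_vmware import api as vmware_api

    def search_image_cache(session, ds_browser, ds_path):
        # invoke_api() issues the SOAP request (here a
        # HostDatastoreBrowser.SearchDatastore_Task, as in the log)
        # and returns a task reference.
        task = session.invoke_api(session.vim, 'SearchDatastore_Task',
                                  ds_browser, datastorePath=ds_path)
        # wait_for_task() polls the task -- the source of the
        # "progress is N%" entries -- and returns its result once the
        # task reaches the 'success' state (or raises on failure).
        return session.wait_for_task(task)

    # A session would be created roughly like this against a reachable
    # vCenter; the host and credentials below are placeholders.
    # session = vmware_api.VMwareAPISession('VC_HOST', 'VC_USER', 'VC_PASS',
    #                                       api_retry_count=3,
    #                                       task_poll_interval=0.5)

Which task type is waited on (CopyVirtualDisk_Task, ReconfigVM_Task, and so on) only changes the method name and arguments; the polling loop is the same.
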
[ 1406.741565] env[63379]: DEBUG oslo.service.loopingcall [None req-5909a1dd-7f47-4cbc-b1bc-f5b57ad661d1 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1406.741864] env[63379]: DEBUG oslo_vmware.api [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Task: {'id': task-1779023, 'name': RelocateVM_Task} progress is 20%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.743218] env[63379]: DEBUG nova.compute.manager [-] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1406.743218] env[63379]: DEBUG nova.network.neutron [-] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1406.754761] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Instance VIF info [] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1406.761016] env[63379]: DEBUG oslo.service.loopingcall [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1406.766278] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1406.766278] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ecc99e09-4d15-42aa-ba9d-08ad6e735644 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.787311] env[63379]: DEBUG oslo_vmware.api [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Task: {'id': task-1779024, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.788699] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1406.788699] env[63379]: value = "task-1779025" [ 1406.788699] env[63379]: _type = "Task" [ 1406.788699] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.797850] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779025, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.912517] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0dfd20b4-6999-49b9-b231-d7c89dc8ea24 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Lock "c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 17.522s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1406.928342] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52980d4b-b907-7043-b658-e1477f9beefc, 'name': SearchDatastore_Task, 'duration_secs': 0.015701} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1406.928671] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1406.928909] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1406.929170] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1406.929333] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1406.929505] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1406.929773] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a09097e4-27f8-467a-8f3d-98756e88e5c4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.941666] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 
tempest-MultipleCreateTestJSON-2001330541-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1406.942010] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1406.943091] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f8134a1-6d56-4d7b-90cc-58e6a850b1ad {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.950316] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for the task: (returnval){ [ 1406.950316] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]522be663-e494-7e1f-cf1d-61f8182699f2" [ 1406.950316] env[63379]: _type = "Task" [ 1406.950316] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.959480] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]522be663-e494-7e1f-cf1d-61f8182699f2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.964135] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager.update_available_resource {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1406.998369] env[63379]: DEBUG nova.network.neutron [-] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1407.089893] env[63379]: DEBUG nova.compute.manager [req-6bda3205-a64b-4196-bb8b-86c92d55cbbc req-06979c01-8644-402f-9160-e2d42edd47e1 service nova] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Received event network-changed-6b70ec9a-65bb-4a1c-9312-97031fc4fc46 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1407.090037] env[63379]: DEBUG nova.compute.manager [req-6bda3205-a64b-4196-bb8b-86c92d55cbbc req-06979c01-8644-402f-9160-e2d42edd47e1 service nova] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Refreshing instance network info cache due to event network-changed-6b70ec9a-65bb-4a1c-9312-97031fc4fc46. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1407.090607] env[63379]: DEBUG oslo_concurrency.lockutils [req-6bda3205-a64b-4196-bb8b-86c92d55cbbc req-06979c01-8644-402f-9160-e2d42edd47e1 service nova] Acquiring lock "refresh_cache-15d19ce3-ea71-47ff-a738-9ba00b8dfcf1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1407.091280] env[63379]: DEBUG oslo_concurrency.lockutils [req-6bda3205-a64b-4196-bb8b-86c92d55cbbc req-06979c01-8644-402f-9160-e2d42edd47e1 service nova] Acquired lock "refresh_cache-15d19ce3-ea71-47ff-a738-9ba00b8dfcf1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1407.091280] env[63379]: DEBUG nova.network.neutron [req-6bda3205-a64b-4196-bb8b-86c92d55cbbc req-06979c01-8644-402f-9160-e2d42edd47e1 service nova] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Refreshing network info cache for port 6b70ec9a-65bb-4a1c-9312-97031fc4fc46 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1407.230441] env[63379]: DEBUG oslo_vmware.api [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Task: {'id': task-1779023, 'name': RelocateVM_Task, 'duration_secs': 0.629976} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.230747] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Volume attach. 
Driver type: vmdk {{(pid=63379) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1407.230946] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369228', 'volume_id': '263e34b9-3753-4240-8bd6-67c4019d79ae', 'name': 'volume-263e34b9-3753-4240-8bd6-67c4019d79ae', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '15d19ce3-ea71-47ff-a738-9ba00b8dfcf1', 'attached_at': '', 'detached_at': '', 'volume_id': '263e34b9-3753-4240-8bd6-67c4019d79ae', 'serial': '263e34b9-3753-4240-8bd6-67c4019d79ae'} {{(pid=63379) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1407.231927] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8d2b048-42c0-426e-9e95-7fa71fbb9b9e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.261420] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-214eb8f6-c6ae-421b-9cd9-d523d733fdc3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.277024] env[63379]: DEBUG oslo_vmware.api [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Task: {'id': task-1779024, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.667578} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.292456] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] aaaf4b06-ef84-41ba-8054-29582854a9f1/aaaf4b06-ef84-41ba-8054-29582854a9f1.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1407.292456] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1407.304040] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Reconfiguring VM instance instance-00000010 to attach disk [datastore1] volume-263e34b9-3753-4240-8bd6-67c4019d79ae/volume-263e34b9-3753-4240-8bd6-67c4019d79ae.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1407.304415] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4942ef7c-8bae-404c-8dfe-47a25a904cc4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.311756] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c219b5c5-e5b8-451e-80cd-ee3299357293 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.342281] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779025, 'name': CreateVM_Task, 'duration_secs': 0.465119} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.345051] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1407.345823] env[63379]: DEBUG oslo_vmware.api [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Waiting for the task: (returnval){ [ 1407.345823] env[63379]: value = "task-1779026" [ 1407.345823] env[63379]: _type = "Task" [ 1407.345823] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.346030] env[63379]: DEBUG oslo_vmware.api [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Waiting for the task: (returnval){ [ 1407.346030] env[63379]: value = "task-1779027" [ 1407.346030] env[63379]: _type = "Task" [ 1407.346030] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.346315] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1407.346529] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1407.346952] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1407.347436] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05f91a20-5093-45c8-bf08-2d7456b9ac21 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.362148] env[63379]: DEBUG oslo_vmware.api [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Waiting for the task: (returnval){ [ 1407.362148] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d154fe-432f-9b7c-5525-1b36100e8016" [ 1407.362148] env[63379]: _type = "Task" [ 1407.362148] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.372042] env[63379]: DEBUG oslo_vmware.api [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Task: {'id': task-1779026, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.372147] env[63379]: DEBUG oslo_vmware.api [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Task: {'id': task-1779027, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.390416] env[63379]: DEBUG oslo_vmware.api [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d154fe-432f-9b7c-5525-1b36100e8016, 'name': SearchDatastore_Task, 'duration_secs': 0.016543} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.390839] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1407.391170] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1407.391488] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1407.467793] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]522be663-e494-7e1f-cf1d-61f8182699f2, 'name': SearchDatastore_Task, 'duration_secs': 0.013481} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.467793] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1407.468412] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-298fc4ce-16e6-4e56-b60c-640b5793dfa8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.476705] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for the task: (returnval){ [ 1407.476705] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]527763e1-e68b-8ccd-4758-bada7a6d201e" [ 1407.476705] env[63379]: _type = "Task" [ 1407.476705] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.487866] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]527763e1-e68b-8ccd-4758-bada7a6d201e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.500991] env[63379]: INFO nova.compute.manager [-] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Took 1.84 seconds to deallocate network for instance. 
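
The lock traffic above comes from oslo.concurrency: the Acquiring/Acquired/Releasing lines (lockutils.py:310/313/331) are the lockutils.lock() context manager guarding the image-cache path, and the 'acquired by ... :: waited' / '"released" by ... :: held' pairs (lockutils.py:402/407/421) are the lockutils.synchronized() decorator, as used for "compute_resources". A small self-contained sketch of both forms follows; the lock names are borrowed from the log purely as examples.

    # Illustrative sketch of the two oslo.concurrency locking forms seen
    # in the DEBUG lines above; the lock bodies are stand-ins.
    from oslo_concurrency import lockutils

    IMAGE_CACHE_LOCK = ('[datastore1] devstack-image-cache_base/'
                        'd3d2d67c-c3e3-4e1e-9156-0c896c5b3d48')

    def refresh_image_cache():
        # Context-manager form: logs 'Acquiring lock', 'Acquired lock' and
        # 'Releasing lock'. (Nova additionally takes an external, inter-process
        # lock here, which is where the "external semaphore" line comes from;
        # this sketch keeps only the in-process lock so it runs anywhere.)
        with lockutils.lock(IMAGE_CACHE_LOCK):
            pass  # fetch or reuse the cached image

    # Decorator form: logs 'Lock "..." acquired by "..." :: waited N s' on
    # entry and '"released" by "..." :: held N s' on exit.
    @lockutils.synchronized('compute_resources')
    def claim_resources():
        pass

    refresh_image_cache()
    claim_resources()
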
[ 1407.818700] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2691481-9fff-4453-a55a-d9e61912b24f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.827720] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98697bd4-eb25-45b4-bb6d-63c25e212493 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.873515] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0718915a-03d1-4f5f-8e60-2ae6d17df91d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.884667] env[63379]: DEBUG oslo_vmware.api [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Task: {'id': task-1779026, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.131761} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.889438] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1407.889727] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1710ecaa-6fd0-4157-95ae-ef0978205bf2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.894221] env[63379]: DEBUG oslo_vmware.api [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Task: {'id': task-1779027, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.894984] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77a354fd-f72b-4ced-81e0-3e91e3a3e73b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.909567] env[63379]: DEBUG nova.compute.provider_tree [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1407.928855] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Reconfiguring VM instance instance-0000000f to attach disk [datastore1] aaaf4b06-ef84-41ba-8054-29582854a9f1/aaaf4b06-ef84-41ba-8054-29582854a9f1.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1407.933044] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-43ec0c41-e99d-4c8d-ba79-aad4ca957159 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.954164] env[63379]: DEBUG nova.compute.manager [req-91682a74-7550-4dcb-94d8-bc52b8298507 req-c1015b49-fd80-4552-ab28-f949e584666c service nova] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Received event network-changed-5bb23315-a5dc-438f-bb8e-fc90360f23ec {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1407.954164] env[63379]: DEBUG nova.compute.manager [req-91682a74-7550-4dcb-94d8-bc52b8298507 req-c1015b49-fd80-4552-ab28-f949e584666c service nova] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Refreshing instance network info cache due to event network-changed-5bb23315-a5dc-438f-bb8e-fc90360f23ec. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1407.954408] env[63379]: DEBUG oslo_concurrency.lockutils [req-91682a74-7550-4dcb-94d8-bc52b8298507 req-c1015b49-fd80-4552-ab28-f949e584666c service nova] Acquiring lock "refresh_cache-571bb238-9cf3-475e-b596-a9609acc8696" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1407.954490] env[63379]: DEBUG oslo_concurrency.lockutils [req-91682a74-7550-4dcb-94d8-bc52b8298507 req-c1015b49-fd80-4552-ab28-f949e584666c service nova] Acquired lock "refresh_cache-571bb238-9cf3-475e-b596-a9609acc8696" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1407.954683] env[63379]: DEBUG nova.network.neutron [req-91682a74-7550-4dcb-94d8-bc52b8298507 req-c1015b49-fd80-4552-ab28-f949e584666c service nova] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Refreshing network info cache for port 5bb23315-a5dc-438f-bb8e-fc90360f23ec {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1407.957899] env[63379]: DEBUG oslo_vmware.api [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Waiting for the task: (returnval){ [ 1407.957899] env[63379]: value = "task-1779028" [ 1407.957899] env[63379]: _type = "Task" [ 1407.957899] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.969496] env[63379]: DEBUG oslo_vmware.api [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Task: {'id': task-1779028, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.990371] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]527763e1-e68b-8ccd-4758-bada7a6d201e, 'name': SearchDatastore_Task, 'duration_secs': 0.021309} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.991180] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1407.991533] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 571bb238-9cf3-475e-b596-a9609acc8696/571bb238-9cf3-475e-b596-a9609acc8696.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1407.991843] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1407.992085] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1407.992323] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-949a2525-d1b9-42e4-8d27-e1c5a940b6f3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.994512] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-163f3dfc-4d18-4a40-8e7b-c96bd85d3604 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.003542] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for the task: (returnval){ [ 1408.003542] env[63379]: value = "task-1779029" [ 1408.003542] env[63379]: _type = "Task" [ 1408.003542] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.009351] env[63379]: DEBUG oslo_concurrency.lockutils [None req-53229a5f-28ee-41dd-9ab7-b9bb941559d6 tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1408.009682] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1408.009899] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1408.011144] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5f4953e-f5bf-45d5-82a1-9eabb716ce81 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.017148] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779029, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.021594] env[63379]: DEBUG oslo_vmware.api [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Waiting for the task: (returnval){ [ 1408.021594] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e8e1d2-d5e6-57af-c5ee-164945478dc7" [ 1408.021594] env[63379]: _type = "Task" [ 1408.021594] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.035791] env[63379]: DEBUG oslo_vmware.api [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e8e1d2-d5e6-57af-c5ee-164945478dc7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.210955] env[63379]: DEBUG nova.network.neutron [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Successfully updated port: 72ec59a1-3694-48aa-884a-9e0b1cebb603 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1408.239403] env[63379]: DEBUG nova.compute.manager [None req-58e2e5a9-8338-4026-a5d9-4ff58b6ded5d tempest-ServerDiagnosticsV248Test-551458049 tempest-ServerDiagnosticsV248Test-551458049-project-admin] [instance: ae565930-1bbc-4e75-bfc1-25dbcfd2e999] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1408.242204] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05187876-4f4d-4ef4-9a35-10db17eb118e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.254443] env[63379]: INFO nova.compute.manager [None req-58e2e5a9-8338-4026-a5d9-4ff58b6ded5d tempest-ServerDiagnosticsV248Test-551458049 tempest-ServerDiagnosticsV248Test-551458049-project-admin] [instance: ae565930-1bbc-4e75-bfc1-25dbcfd2e999] Retrieving diagnostics [ 1408.255636] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c37365d-0bd2-4cfa-b9f1-84bfc2605bb9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.351508] env[63379]: DEBUG nova.network.neutron [req-6bda3205-a64b-4196-bb8b-86c92d55cbbc req-06979c01-8644-402f-9160-e2d42edd47e1 service nova] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Updated VIF entry in instance network info cache for port 6b70ec9a-65bb-4a1c-9312-97031fc4fc46. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1408.351870] env[63379]: DEBUG nova.network.neutron [req-6bda3205-a64b-4196-bb8b-86c92d55cbbc req-06979c01-8644-402f-9160-e2d42edd47e1 service nova] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Updating instance_info_cache with network_info: [{"id": "6b70ec9a-65bb-4a1c-9312-97031fc4fc46", "address": "fa:16:3e:8f:e4:09", "network": {"id": "5cdac896-d067-4c9e-9ccd-954fce726e11", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-637047934-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e0e95e0ca1804616a5d258396749d295", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e", "external-id": "nsx-vlan-transportzone-988", "segmentation_id": 988, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b70ec9a-65", "ovs_interfaceid": "6b70ec9a-65bb-4a1c-9312-97031fc4fc46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1408.380541] env[63379]: DEBUG oslo_vmware.api [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Task: {'id': task-1779027, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.434916] env[63379]: DEBUG nova.scheduler.client.report [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1408.473806] env[63379]: DEBUG oslo_vmware.api [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Task: {'id': task-1779028, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.516858] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779029, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.537241] env[63379]: DEBUG oslo_vmware.api [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e8e1d2-d5e6-57af-c5ee-164945478dc7, 'name': SearchDatastore_Task, 'duration_secs': 0.014221} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.538213] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a512d44-bf5e-4dea-b869-c64aa0042a90 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.545545] env[63379]: DEBUG oslo_vmware.api [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Waiting for the task: (returnval){ [ 1408.545545] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]527f3648-597d-3388-1aaa-7dce31127600" [ 1408.545545] env[63379]: _type = "Task" [ 1408.545545] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.555584] env[63379]: DEBUG oslo_vmware.api [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]527f3648-597d-3388-1aaa-7dce31127600, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.713781] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquiring lock "refresh_cache-d221329b-eee4-42f5-bb27-cf6af0386c04" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1408.713923] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquired lock "refresh_cache-d221329b-eee4-42f5-bb27-cf6af0386c04" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1408.714115] env[63379]: DEBUG nova.network.neutron [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1408.854865] env[63379]: DEBUG oslo_concurrency.lockutils [req-6bda3205-a64b-4196-bb8b-86c92d55cbbc req-06979c01-8644-402f-9160-e2d42edd47e1 service nova] Releasing lock "refresh_cache-15d19ce3-ea71-47ff-a738-9ba00b8dfcf1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1409.630448] env[63379]: DEBUG oslo_concurrency.lockutils [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.265s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1409.631607] env[63379]: DEBUG nova.compute.manager [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1409.636027] env[63379]: DEBUG nova.network.neutron [-] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1409.639939] env[63379]: DEBUG oslo_vmware.api [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Task: {'id': task-1779027, 'name': ReconfigVM_Task, 'duration_secs': 1.487768} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.639939] env[63379]: DEBUG nova.network.neutron [req-91682a74-7550-4dcb-94d8-bc52b8298507 req-c1015b49-fd80-4552-ab28-f949e584666c service nova] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Updated VIF entry in instance network info cache for port 5bb23315-a5dc-438f-bb8e-fc90360f23ec. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1409.639939] env[63379]: DEBUG nova.network.neutron [req-91682a74-7550-4dcb-94d8-bc52b8298507 req-c1015b49-fd80-4552-ab28-f949e584666c service nova] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Updating instance_info_cache with network_info: [{"id": "5bb23315-a5dc-438f-bb8e-fc90360f23ec", "address": "fa:16:3e:de:b8:cd", "network": {"id": "4d473417-d5ca-4b8c-bb54-df6a37cef0bf", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-2006058150-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dce3d2c2429642ee92f4bb7e53b0a128", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61a172ee-af3f-473e-b12a-3fee5bf39c8d", "external-id": "nsx-vlan-transportzone-997", "segmentation_id": 997, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5bb23315-a5", "ovs_interfaceid": "5bb23315-a5dc-438f-bb8e-fc90360f23ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1409.641912] env[63379]: DEBUG oslo_concurrency.lockutils [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.620s {{(pid=63379) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1409.643367] env[63379]: INFO nova.compute.claims [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1409.656400] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Reconfigured VM instance instance-00000010 to attach disk [datastore1] volume-263e34b9-3753-4240-8bd6-67c4019d79ae/volume-263e34b9-3753-4240-8bd6-67c4019d79ae.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1409.665486] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c522b6a4-3bca-4ba4-aa47-709e5a81a1ee {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.691038] env[63379]: DEBUG oslo_vmware.api [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]527f3648-597d-3388-1aaa-7dce31127600, 'name': SearchDatastore_Task, 'duration_secs': 0.017458} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.691038] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779029, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.346912} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.691038] env[63379]: DEBUG oslo_vmware.api [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Task: {'id': task-1779028, 'name': ReconfigVM_Task, 'duration_secs': 0.569413} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.693696] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1409.693964] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] a6f7c217-a493-403d-b776-870df4575f2a/a6f7c217-a493-403d-b776-870df4575f2a.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1409.694286] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 571bb238-9cf3-475e-b596-a9609acc8696/571bb238-9cf3-475e-b596-a9609acc8696.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1409.694490] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1409.696063] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Reconfigured VM instance instance-0000000f to attach disk [datastore1] aaaf4b06-ef84-41ba-8054-29582854a9f1/aaaf4b06-ef84-41ba-8054-29582854a9f1.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1409.696063] env[63379]: DEBUG oslo_vmware.api [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Waiting for the task: (returnval){ [ 1409.696063] env[63379]: value = "task-1779030" [ 1409.696063] env[63379]: _type = "Task" [ 1409.696063] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.696400] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d5a666ba-7521-49ec-9f88-c5ba505e0227 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.698218] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3893cfb7-c0f0-4249-b218-95f040e9e117 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.700948] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f6a27562-2005-40fb-a8c2-a2f0366fdb44 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.704546] env[63379]: DEBUG oslo_vmware.rw_handles [None req-cff8885b-00c4-464d-91a5-a42d5693a8f4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52475826-aa83-3f8d-9bba-4be5eade29bf/disk-0.vmdk. {{(pid=63379) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1409.708445] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb0c24ba-5f96-4276-be63-3f78192f9015 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.725414] env[63379]: DEBUG oslo_vmware.api [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Task: {'id': task-1779030, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.725826] env[63379]: DEBUG oslo_vmware.rw_handles [None req-cff8885b-00c4-464d-91a5-a42d5693a8f4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52475826-aa83-3f8d-9bba-4be5eade29bf/disk-0.vmdk is in state: ready. {{(pid=63379) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1409.726050] env[63379]: ERROR oslo_vmware.rw_handles [None req-cff8885b-00c4-464d-91a5-a42d5693a8f4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52475826-aa83-3f8d-9bba-4be5eade29bf/disk-0.vmdk due to incomplete transfer. [ 1409.726531] env[63379]: DEBUG oslo_vmware.api [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Waiting for the task: (returnval){ [ 1409.726531] env[63379]: value = "task-1779032" [ 1409.726531] env[63379]: _type = "Task" [ 1409.726531] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.726782] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for the task: (returnval){ [ 1409.726782] env[63379]: value = "task-1779031" [ 1409.726782] env[63379]: _type = "Task" [ 1409.726782] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.728155] env[63379]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-433c5731-33a0-44f4-84f4-236e043e91e0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.730704] env[63379]: DEBUG oslo_vmware.api [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Waiting for the task: (returnval){ [ 1409.730704] env[63379]: value = "task-1779033" [ 1409.730704] env[63379]: _type = "Task" [ 1409.730704] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.747578] env[63379]: DEBUG oslo_vmware.rw_handles [None req-cff8885b-00c4-464d-91a5-a42d5693a8f4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52475826-aa83-3f8d-9bba-4be5eade29bf/disk-0.vmdk. {{(pid=63379) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1409.747921] env[63379]: DEBUG nova.virt.vmwareapi.images [None req-cff8885b-00c4-464d-91a5-a42d5693a8f4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Uploaded image b4d4e2bd-9da6-4e0b-9f28-188f22313c1d to the Glance image server {{(pid=63379) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1409.750179] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-cff8885b-00c4-464d-91a5-a42d5693a8f4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Destroying the VM {{(pid=63379) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1409.761673] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-733eb816-26d0-4db7-8654-6e97383a9b27 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.763168] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779031, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.763612] env[63379]: DEBUG oslo_vmware.api [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Task: {'id': task-1779033, 'name': Rename_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.763701] env[63379]: DEBUG oslo_vmware.api [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Task: {'id': task-1779032, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.771436] env[63379]: DEBUG oslo_vmware.api [None req-cff8885b-00c4-464d-91a5-a42d5693a8f4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1409.771436] env[63379]: value = "task-1779034" [ 1409.771436] env[63379]: _type = "Task" [ 1409.771436] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.785716] env[63379]: DEBUG oslo_vmware.api [None req-cff8885b-00c4-464d-91a5-a42d5693a8f4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779034, 'name': Destroy_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.949166] env[63379]: DEBUG nova.network.neutron [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1410.144630] env[63379]: DEBUG nova.compute.utils [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1410.146846] env[63379]: INFO nova.compute.manager [-] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Took 3.40 seconds to deallocate network for instance. [ 1410.149863] env[63379]: DEBUG nova.compute.manager [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1410.150024] env[63379]: DEBUG nova.network.neutron [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1410.211740] env[63379]: DEBUG oslo_concurrency.lockutils [req-91682a74-7550-4dcb-94d8-bc52b8298507 req-c1015b49-fd80-4552-ab28-f949e584666c service nova] Releasing lock "refresh_cache-571bb238-9cf3-475e-b596-a9609acc8696" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1410.212032] env[63379]: DEBUG nova.compute.manager [req-91682a74-7550-4dcb-94d8-bc52b8298507 req-c1015b49-fd80-4552-ab28-f949e584666c service nova] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Received event network-changed-1913f18a-c402-444f-bfec-50a3ab88167d {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1410.213538] env[63379]: DEBUG nova.compute.manager [req-91682a74-7550-4dcb-94d8-bc52b8298507 req-c1015b49-fd80-4552-ab28-f949e584666c service nova] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Refreshing instance network info cache due to event network-changed-1913f18a-c402-444f-bfec-50a3ab88167d. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1410.213538] env[63379]: DEBUG oslo_concurrency.lockutils [req-91682a74-7550-4dcb-94d8-bc52b8298507 req-c1015b49-fd80-4552-ab28-f949e584666c service nova] Acquiring lock "refresh_cache-bf0dd3cf-684c-4378-a89c-5b9f16df062d" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1410.213538] env[63379]: DEBUG oslo_concurrency.lockutils [req-91682a74-7550-4dcb-94d8-bc52b8298507 req-c1015b49-fd80-4552-ab28-f949e584666c service nova] Acquired lock "refresh_cache-bf0dd3cf-684c-4378-a89c-5b9f16df062d" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1410.213538] env[63379]: DEBUG nova.network.neutron [req-91682a74-7550-4dcb-94d8-bc52b8298507 req-c1015b49-fd80-4552-ab28-f949e584666c service nova] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Refreshing network info cache for port 1913f18a-c402-444f-bfec-50a3ab88167d {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1410.220868] env[63379]: DEBUG oslo_vmware.api [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Task: {'id': task-1779030, 'name': ReconfigVM_Task, 'duration_secs': 0.152291} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1410.223601] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369228', 'volume_id': '263e34b9-3753-4240-8bd6-67c4019d79ae', 'name': 'volume-263e34b9-3753-4240-8bd6-67c4019d79ae', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '15d19ce3-ea71-47ff-a738-9ba00b8dfcf1', 'attached_at': '', 'detached_at': '', 'volume_id': '263e34b9-3753-4240-8bd6-67c4019d79ae', 'serial': '263e34b9-3753-4240-8bd6-67c4019d79ae'} {{(pid=63379) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1410.225084] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8b5fff26-4d18-4df8-8d68-17f30f08dbb6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.244506] env[63379]: DEBUG nova.compute.manager [req-d3eb7c23-50f4-4d8c-b8a9-b92fd5c6da8a req-d6cd9083-b765-4474-8f6d-6bc0edcb5691 service nova] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Received event network-vif-plugged-72ec59a1-3694-48aa-884a-9e0b1cebb603 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1410.244720] env[63379]: DEBUG oslo_concurrency.lockutils [req-d3eb7c23-50f4-4d8c-b8a9-b92fd5c6da8a req-d6cd9083-b765-4474-8f6d-6bc0edcb5691 service nova] Acquiring lock "d221329b-eee4-42f5-bb27-cf6af0386c04-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1410.245048] env[63379]: DEBUG oslo_concurrency.lockutils [req-d3eb7c23-50f4-4d8c-b8a9-b92fd5c6da8a req-d6cd9083-b765-4474-8f6d-6bc0edcb5691 service nova] Lock "d221329b-eee4-42f5-bb27-cf6af0386c04-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1410.245133] env[63379]: DEBUG oslo_concurrency.lockutils [req-d3eb7c23-50f4-4d8c-b8a9-b92fd5c6da8a req-d6cd9083-b765-4474-8f6d-6bc0edcb5691 service nova] Lock "d221329b-eee4-42f5-bb27-cf6af0386c04-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1410.245375] env[63379]: DEBUG nova.compute.manager [req-d3eb7c23-50f4-4d8c-b8a9-b92fd5c6da8a req-d6cd9083-b765-4474-8f6d-6bc0edcb5691 service nova] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] No waiting events found dispatching network-vif-plugged-72ec59a1-3694-48aa-884a-9e0b1cebb603 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1410.245533] env[63379]: WARNING nova.compute.manager [req-d3eb7c23-50f4-4d8c-b8a9-b92fd5c6da8a req-d6cd9083-b765-4474-8f6d-6bc0edcb5691 service nova] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Received unexpected event network-vif-plugged-72ec59a1-3694-48aa-884a-9e0b1cebb603 for instance with vm_state building and task_state spawning.
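The three oslo_concurrency.lockutils entries above (Acquiring / acquired / "released" around the "d221329b-eee4-42f5-bb27-cf6af0386c04-events" lock) are emitted by the synchronized decorator that is applied to a nested event-handler function, which is why the logged qualname ends in ".<locals>._pop_event". A minimal sketch of that primitive, for orientation only and using plain oslo.concurrency rather than Nova's wrappers; pop_instance_event, pending_events, and the return value are illustrative names, and only the "<uuid>-events" lock-name format is taken from the log:

    from oslo_concurrency import lockutils

    def pop_instance_event(instance_uuid, event_name, pending_events):
        # Decorating a nested function lets the lock name be built from
        # runtime data (the instance UUID); each call logs the
        # "Acquiring lock ... by ...<locals>..." lines seen above.
        @lockutils.synchronized('%s-events' % instance_uuid)
        def _pop_event():
            return pending_events.get(instance_uuid, {}).pop(event_name, None)

        return _pop_event()

The per-instance lock name serializes event handling for one instance without blocking others, which is what the "waited 0.000s / held 0.000s" timings above are measuring.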
[ 1410.245705] env[63379]: DEBUG nova.compute.manager [req-d3eb7c23-50f4-4d8c-b8a9-b92fd5c6da8a req-d6cd9083-b765-4474-8f6d-6bc0edcb5691 service nova] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Received event network-changed-72ec59a1-3694-48aa-884a-9e0b1cebb603 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1410.245841] env[63379]: DEBUG nova.compute.manager [req-d3eb7c23-50f4-4d8c-b8a9-b92fd5c6da8a req-d6cd9083-b765-4474-8f6d-6bc0edcb5691 service nova] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Refreshing instance network info cache due to event network-changed-72ec59a1-3694-48aa-884a-9e0b1cebb603. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1410.246021] env[63379]: DEBUG oslo_concurrency.lockutils [req-d3eb7c23-50f4-4d8c-b8a9-b92fd5c6da8a req-d6cd9083-b765-4474-8f6d-6bc0edcb5691 service nova] Acquiring lock "refresh_cache-d221329b-eee4-42f5-bb27-cf6af0386c04" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1410.247023] env[63379]: DEBUG oslo_vmware.api [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Waiting for the task: (returnval){ [ 1410.247023] env[63379]: value = "task-1779035" [ 1410.247023] env[63379]: _type = "Task" [ 1410.247023] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.252612] env[63379]: DEBUG nova.network.neutron [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Updating instance_info_cache with network_info: [{"id": "72ec59a1-3694-48aa-884a-9e0b1cebb603", "address": "fa:16:3e:3a:16:33", "network": {"id": "4d473417-d5ca-4b8c-bb54-df6a37cef0bf", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-2006058150-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dce3d2c2429642ee92f4bb7e53b0a128", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61a172ee-af3f-473e-b12a-3fee5bf39c8d", "external-id": "nsx-vlan-transportzone-997", "segmentation_id": 997, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72ec59a1-36", "ovs_interfaceid": "72ec59a1-3694-48aa-884a-9e0b1cebb603", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1410.260379] env[63379]: DEBUG oslo_vmware.api [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Task: {'id': task-1779032, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.264847] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779031, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076764} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1410.265500] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1410.266437] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5b2d9a4-93e6-4b9e-91f9-a6acda984bdd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.275829] env[63379]: DEBUG oslo_vmware.api [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Task: {'id': task-1779035, 'name': Rename_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.276499] env[63379]: DEBUG oslo_vmware.api [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Task: {'id': task-1779033, 'name': Rename_Task, 'duration_secs': 0.257024} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1410.280855] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1410.292727] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fb8974f2-4686-4532-8c2f-365adfb16525 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.303106] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Reconfiguring VM instance instance-00000011 to attach disk [datastore1] 571bb238-9cf3-475e-b596-a9609acc8696/571bb238-9cf3-475e-b596-a9609acc8696.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1410.304518] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-afa9e259-2975-4484-852b-7bc9fb3f8e42 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.323253] env[63379]: DEBUG oslo_vmware.api [None req-cff8885b-00c4-464d-91a5-a42d5693a8f4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779034, 'name': Destroy_Task} progress is 33%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.330946] env[63379]: DEBUG oslo_vmware.api [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Waiting for the task: (returnval){ [ 1410.330946] env[63379]: value = "task-1779036" [ 1410.330946] env[63379]: _type = "Task" [ 1410.330946] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.330946] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for the task: (returnval){ [ 1410.330946] env[63379]: value = "task-1779037" [ 1410.330946] env[63379]: _type = "Task" [ 1410.330946] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.343492] env[63379]: DEBUG oslo_vmware.api [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Task: {'id': task-1779036, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.347044] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779037, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.366916] env[63379]: DEBUG nova.policy [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '705e4dcdc706493785b4a61ffc95d25e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '62dd639e7cfb49ce85b2950d8191c024', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1410.446641] env[63379]: DEBUG oslo_concurrency.lockutils [None req-95583623-9321-4522-be0e-578151886a4d tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Acquiring lock "ae565930-1bbc-4e75-bfc1-25dbcfd2e999" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1410.446641] env[63379]: DEBUG oslo_concurrency.lockutils [None req-95583623-9321-4522-be0e-578151886a4d tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Lock "ae565930-1bbc-4e75-bfc1-25dbcfd2e999" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1410.446641] env[63379]: DEBUG oslo_concurrency.lockutils [None req-95583623-9321-4522-be0e-578151886a4d tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Acquiring lock "ae565930-1bbc-4e75-bfc1-25dbcfd2e999-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1410.446641] env[63379]: DEBUG oslo_concurrency.lockutils [None req-95583623-9321-4522-be0e-578151886a4d tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Lock "ae565930-1bbc-4e75-bfc1-25dbcfd2e999-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1410.446641] env[63379]: DEBUG oslo_concurrency.lockutils [None req-95583623-9321-4522-be0e-578151886a4d tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Lock "ae565930-1bbc-4e75-bfc1-25dbcfd2e999-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1410.453667] env[63379]: INFO nova.compute.manager [None req-95583623-9321-4522-be0e-578151886a4d tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] [instance: ae565930-1bbc-4e75-bfc1-25dbcfd2e999] Terminating instance [ 1410.457938] env[63379]: DEBUG oslo_concurrency.lockutils [None
req-95583623-9321-4522-be0e-578151886a4d tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Acquiring lock "refresh_cache-ae565930-1bbc-4e75-bfc1-25dbcfd2e999" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1410.457938] env[63379]: DEBUG oslo_concurrency.lockutils [None req-95583623-9321-4522-be0e-578151886a4d tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Acquired lock "refresh_cache-ae565930-1bbc-4e75-bfc1-25dbcfd2e999" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1410.457938] env[63379]: DEBUG nova.network.neutron [None req-95583623-9321-4522-be0e-578151886a4d tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] [instance: ae565930-1bbc-4e75-bfc1-25dbcfd2e999] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1410.652267] env[63379]: DEBUG nova.compute.manager [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1410.665307] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5909a1dd-7f47-4cbc-b1bc-f5b57ad661d1 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1410.753296] env[63379]: DEBUG oslo_vmware.api [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Task: {'id': task-1779032, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.590488} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1410.762555] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] a6f7c217-a493-403d-b776-870df4575f2a/a6f7c217-a493-403d-b776-870df4575f2a.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1410.763013] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1410.764633] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Releasing lock "refresh_cache-d221329b-eee4-42f5-bb27-cf6af0386c04" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1410.765043] env[63379]: DEBUG nova.compute.manager [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Instance network_info: |[{"id": "72ec59a1-3694-48aa-884a-9e0b1cebb603", "address": "fa:16:3e:3a:16:33", "network": {"id": "4d473417-d5ca-4b8c-bb54-df6a37cef0bf", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-2006058150-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dce3d2c2429642ee92f4bb7e53b0a128", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61a172ee-af3f-473e-b12a-3fee5bf39c8d", "external-id": "nsx-vlan-transportzone-997", "segmentation_id": 997, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72ec59a1-36", "ovs_interfaceid": "72ec59a1-3694-48aa-884a-9e0b1cebb603", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1410.765318] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cf7423a5-26ef-4455-b462-fdd30569f07c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.767759] env[63379]: DEBUG oslo_concurrency.lockutils [req-d3eb7c23-50f4-4d8c-b8a9-b92fd5c6da8a req-d6cd9083-b765-4474-8f6d-6bc0edcb5691 service nova] Acquired lock "refresh_cache-d221329b-eee4-42f5-bb27-cf6af0386c04" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1410.768555] env[63379]: DEBUG 
nova.network.neutron [req-d3eb7c23-50f4-4d8c-b8a9-b92fd5c6da8a req-d6cd9083-b765-4474-8f6d-6bc0edcb5691 service nova] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Refreshing network info cache for port 72ec59a1-3694-48aa-884a-9e0b1cebb603 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1410.769266] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3a:16:33', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '61a172ee-af3f-473e-b12a-3fee5bf39c8d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '72ec59a1-3694-48aa-884a-9e0b1cebb603', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1410.778813] env[63379]: DEBUG oslo.service.loopingcall [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1410.779166] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1410.785615] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2a2e2a63-7e16-4990-a4f7-7f9d2ea3e5e5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.811250] env[63379]: DEBUG oslo_vmware.api [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Task: {'id': task-1779035, 'name': Rename_Task, 'duration_secs': 0.191027} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1410.813890] env[63379]: DEBUG oslo_vmware.api [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Waiting for the task: (returnval){ [ 1410.813890] env[63379]: value = "task-1779038" [ 1410.813890] env[63379]: _type = "Task" [ 1410.813890] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.813890] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1410.814582] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3d8271f2-e7a0-497a-9354-a99140738eb6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.824401] env[63379]: DEBUG oslo_vmware.api [None req-cff8885b-00c4-464d-91a5-a42d5693a8f4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779034, 'name': Destroy_Task, 'duration_secs': 0.727912} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1410.824682] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1410.824682] env[63379]: value = "task-1779039" [ 1410.824682] env[63379]: _type = "Task" [ 1410.824682] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.828069] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-cff8885b-00c4-464d-91a5-a42d5693a8f4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Destroyed the VM [ 1410.828448] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cff8885b-00c4-464d-91a5-a42d5693a8f4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Deleting Snapshot of the VM instance {{(pid=63379) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1410.829919] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-fce7e7c1-bf71-4f51-a2a7-455198761f40 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.838524] env[63379]: DEBUG oslo_vmware.api [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Waiting for the task: (returnval){ [ 1410.838524] env[63379]: value = "task-1779040" [ 1410.838524] env[63379]: _type = "Task" [ 1410.838524] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.838789] env[63379]: DEBUG oslo_vmware.api [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Task: {'id': task-1779038, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.855804] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779039, 'name': CreateVM_Task} progress is 6%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.858595] env[63379]: DEBUG oslo_vmware.api [None req-cff8885b-00c4-464d-91a5-a42d5693a8f4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1410.858595] env[63379]: value = "task-1779041" [ 1410.858595] env[63379]: _type = "Task" [ 1410.858595] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.873582] env[63379]: DEBUG oslo_vmware.api [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Task: {'id': task-1779036, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.873582] env[63379]: DEBUG oslo_vmware.api [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Task: {'id': task-1779040, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.873721] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779037, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.882318] env[63379]: DEBUG oslo_vmware.api [None req-cff8885b-00c4-464d-91a5-a42d5693a8f4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779041, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.008970] env[63379]: DEBUG nova.network.neutron [None req-95583623-9321-4522-be0e-578151886a4d tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] [instance: ae565930-1bbc-4e75-bfc1-25dbcfd2e999] Instance cache missing network info. 
{{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1411.162449] env[63379]: DEBUG nova.network.neutron [None req-95583623-9321-4522-be0e-578151886a4d tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] [instance: ae565930-1bbc-4e75-bfc1-25dbcfd2e999] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1411.196384] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-946af0cd-9279-4ba6-aa80-355eb73b235b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.213554] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9b01228-c9de-48ed-9369-0c1f50662493 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.259060] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d98a2de-e475-4a69-ae64-d421c20bc073 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.263208] env[63379]: DEBUG nova.network.neutron [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Successfully created port: a524aedc-254a-4394-836b-4136823591d8 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1411.271903] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56a71958-dde3-45fc-b374-1ddb271df0d8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.290548] env[63379]: DEBUG nova.compute.provider_tree [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1411.316991] env[63379]: DEBUG nova.compute.manager [req-babda3d4-15a5-49f0-8f39-b7332dc924c2 req-cb19919d-2f6d-499a-8d03-42c72c3bd6d2 service nova] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Received event network-vif-deleted-c856b8f8-3490-43b2-b2c2-b96a5c3e550e {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1411.317296] env[63379]: DEBUG nova.compute.manager [req-babda3d4-15a5-49f0-8f39-b7332dc924c2 req-cb19919d-2f6d-499a-8d03-42c72c3bd6d2 service nova] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Received event network-vif-deleted-b26a8dba-cd30-4320-901e-8e9a8584ea6f {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1411.317526] env[63379]: DEBUG nova.compute.manager [req-babda3d4-15a5-49f0-8f39-b7332dc924c2 req-cb19919d-2f6d-499a-8d03-42c72c3bd6d2 service nova] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Received event network-vif-deleted-cb52a59c-c52f-446e-b305-8cbd08c646d1 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1411.317737] env[63379]: DEBUG nova.compute.manager [req-babda3d4-15a5-49f0-8f39-b7332dc924c2 
req-cb19919d-2f6d-499a-8d03-42c72c3bd6d2 service nova] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Received event network-changed-65e3bc3b-bfed-4dd6-be59-87481a211014 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1411.318085] env[63379]: DEBUG nova.compute.manager [req-babda3d4-15a5-49f0-8f39-b7332dc924c2 req-cb19919d-2f6d-499a-8d03-42c72c3bd6d2 service nova] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Refreshing instance network info cache due to event network-changed-65e3bc3b-bfed-4dd6-be59-87481a211014. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1411.318372] env[63379]: DEBUG oslo_concurrency.lockutils [req-babda3d4-15a5-49f0-8f39-b7332dc924c2 req-cb19919d-2f6d-499a-8d03-42c72c3bd6d2 service nova] Acquiring lock "refresh_cache-25090d85-cd10-44fc-aa9d-071ada14f249" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1411.318623] env[63379]: DEBUG oslo_concurrency.lockutils [req-babda3d4-15a5-49f0-8f39-b7332dc924c2 req-cb19919d-2f6d-499a-8d03-42c72c3bd6d2 service nova] Acquired lock "refresh_cache-25090d85-cd10-44fc-aa9d-071ada14f249" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1411.318954] env[63379]: DEBUG nova.network.neutron [req-babda3d4-15a5-49f0-8f39-b7332dc924c2 req-cb19919d-2f6d-499a-8d03-42c72c3bd6d2 service nova] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Refreshing network info cache for port 65e3bc3b-bfed-4dd6-be59-87481a211014 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1411.336314] env[63379]: DEBUG oslo_vmware.api [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Task: {'id': task-1779038, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072786} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1411.337663] env[63379]: DEBUG nova.network.neutron [req-91682a74-7550-4dcb-94d8-bc52b8298507 req-c1015b49-fd80-4552-ab28-f949e584666c service nova] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Updated VIF entry in instance network info cache for port 1913f18a-c402-444f-bfec-50a3ab88167d. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1411.338207] env[63379]: DEBUG nova.network.neutron [req-91682a74-7550-4dcb-94d8-bc52b8298507 req-c1015b49-fd80-4552-ab28-f949e584666c service nova] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Updating instance_info_cache with network_info: [{"id": "1913f18a-c402-444f-bfec-50a3ab88167d", "address": "fa:16:3e:17:52:16", "network": {"id": "b8a1048f-18ff-4dd7-a19e-5d58874f3f5d", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1410192054-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.190", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea2c1f9216ee4d8e8349a27de543c2d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaba65c3-6925-4c7f-83b6-17cd1a328e27", "external-id": "nsx-vlan-transportzone-202", "segmentation_id": 202, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1913f18a-c4", "ovs_interfaceid": "1913f18a-c402-444f-bfec-50a3ab88167d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1411.342814] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1411.345354] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-368ee88c-d912-4f8e-af9a-c39f4f233d4f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.363014] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779039, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.381310] env[63379]: DEBUG oslo_vmware.api [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Task: {'id': task-1779036, 'name': PowerOnVM_Task, 'duration_secs': 0.990614} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1411.390646] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Reconfiguring VM instance instance-00000006 to attach disk [datastore1] a6f7c217-a493-403d-b776-870df4575f2a/a6f7c217-a493-403d-b776-870df4575f2a.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1411.391357] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779037, 'name': ReconfigVM_Task, 'duration_secs': 0.784266} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1411.398164] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1411.398684] env[63379]: INFO nova.compute.manager [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Took 15.01 seconds to spawn the instance on the hypervisor. [ 1411.398684] env[63379]: DEBUG nova.compute.manager [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1411.400290] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d62335f8-75e8-45a1-8bd7-8d732331b34a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.415653] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Reconfigured VM instance instance-00000011 to attach disk [datastore1] 571bb238-9cf3-475e-b596-a9609acc8696/571bb238-9cf3-475e-b596-a9609acc8696.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1411.416449] env[63379]: DEBUG oslo_vmware.api [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Task: {'id': task-1779040, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.417730] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d572acd-569d-4036-a066-ffbef89dea50 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.421575] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-84bd4de7-3d97-4c94-9985-e31ad2e39ec7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.428973] env[63379]: DEBUG oslo_vmware.api [None req-cff8885b-00c4-464d-91a5-a42d5693a8f4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779041, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.440187] env[63379]: DEBUG oslo_vmware.api [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Waiting for the task: (returnval){ [ 1411.440187] env[63379]: value = "task-1779042" [ 1411.440187] env[63379]: _type = "Task" [ 1411.440187] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1411.440529] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for the task: (returnval){ [ 1411.440529] env[63379]: value = "task-1779043" [ 1411.440529] env[63379]: _type = "Task" [ 1411.440529] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1411.457708] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779043, 'name': Rename_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.463413] env[63379]: DEBUG oslo_vmware.api [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Task: {'id': task-1779042, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.669226] env[63379]: DEBUG oslo_concurrency.lockutils [None req-95583623-9321-4522-be0e-578151886a4d tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Releasing lock "refresh_cache-ae565930-1bbc-4e75-bfc1-25dbcfd2e999" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1411.671198] env[63379]: DEBUG nova.compute.manager [None req-95583623-9321-4522-be0e-578151886a4d tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] [instance: ae565930-1bbc-4e75-bfc1-25dbcfd2e999] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1411.673019] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-95583623-9321-4522-be0e-578151886a4d tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] [instance: ae565930-1bbc-4e75-bfc1-25dbcfd2e999] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1411.674039] env[63379]: DEBUG nova.compute.manager [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1411.680767] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-064cdf75-3ba4-4014-8a03-2a8bd2df6f62 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.691171] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-95583623-9321-4522-be0e-578151886a4d tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] [instance: ae565930-1bbc-4e75-bfc1-25dbcfd2e999] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1411.692964] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-17336734-963f-472f-a4ec-4acba57f853c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.707911] env[63379]: DEBUG oslo_vmware.api [None req-95583623-9321-4522-be0e-578151886a4d tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Waiting for the task: (returnval){ [ 1411.707911] env[63379]: value = "task-1779044" [ 1411.707911] env[63379]: _type = "Task" [ 1411.707911] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1411.723261] env[63379]: DEBUG oslo_vmware.api [None req-95583623-9321-4522-be0e-578151886a4d tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Task: {'id': task-1779044, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.728491] env[63379]: DEBUG nova.virt.hardware [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1411.728821] env[63379]: DEBUG nova.virt.hardware [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1411.729479] env[63379]: DEBUG nova.virt.hardware [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1411.730623] env[63379]: DEBUG nova.virt.hardware [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1411.730863] env[63379]: DEBUG nova.virt.hardware [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1411.731627] env[63379]: DEBUG nova.virt.hardware [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1411.733294] env[63379]: DEBUG nova.virt.hardware [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1411.733294] env[63379]: DEBUG nova.virt.hardware [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1411.733294] 
env[63379]: DEBUG nova.virt.hardware [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1411.733601] env[63379]: DEBUG nova.virt.hardware [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1411.734068] env[63379]: DEBUG nova.virt.hardware [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1411.735894] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd557983-59e5-4387-a413-c50397fcbe61 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.748111] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23ad354e-50ca-4abc-a4f9-99aa4aea07df {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.774180] env[63379]: DEBUG nova.network.neutron [req-d3eb7c23-50f4-4d8c-b8a9-b92fd5c6da8a req-d6cd9083-b765-4474-8f6d-6bc0edcb5691 service nova] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Updated VIF entry in instance network info cache for port 72ec59a1-3694-48aa-884a-9e0b1cebb603. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1411.774715] env[63379]: DEBUG nova.network.neutron [req-d3eb7c23-50f4-4d8c-b8a9-b92fd5c6da8a req-d6cd9083-b765-4474-8f6d-6bc0edcb5691 service nova] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Updating instance_info_cache with network_info: [{"id": "72ec59a1-3694-48aa-884a-9e0b1cebb603", "address": "fa:16:3e:3a:16:33", "network": {"id": "4d473417-d5ca-4b8c-bb54-df6a37cef0bf", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-2006058150-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dce3d2c2429642ee92f4bb7e53b0a128", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61a172ee-af3f-473e-b12a-3fee5bf39c8d", "external-id": "nsx-vlan-transportzone-997", "segmentation_id": 997, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72ec59a1-36", "ovs_interfaceid": "72ec59a1-3694-48aa-884a-9e0b1cebb603", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1411.794715] env[63379]: DEBUG nova.scheduler.client.report [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1411.852813] env[63379]: DEBUG oslo_concurrency.lockutils [req-91682a74-7550-4dcb-94d8-bc52b8298507 req-c1015b49-fd80-4552-ab28-f949e584666c service nova] Releasing lock "refresh_cache-bf0dd3cf-684c-4378-a89c-5b9f16df062d" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1411.853277] env[63379]: DEBUG nova.compute.manager [req-91682a74-7550-4dcb-94d8-bc52b8298507 req-c1015b49-fd80-4552-ab28-f949e584666c service nova] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Received event network-vif-deleted-a7d101c2-09da-4502-aa7a-988de81f6ee7 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1411.853667] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779039, 'name': CreateVM_Task, 'duration_secs': 0.66307} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1411.854231] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1411.855298] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1411.855298] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1411.855485] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1411.859959] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7835f44f-7769-4b45-9e34-833a1b23c3de {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.874768] env[63379]: DEBUG oslo_vmware.api [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Task: {'id': task-1779040, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.875203] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for the task: (returnval){ [ 1411.875203] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]529e1bac-191f-6a3b-f3ad-559e0bdff60d" [ 1411.875203] env[63379]: _type = "Task" [ 1411.875203] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1411.885421] env[63379]: DEBUG oslo_vmware.api [None req-cff8885b-00c4-464d-91a5-a42d5693a8f4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779041, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.894891] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]529e1bac-191f-6a3b-f3ad-559e0bdff60d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.961391] env[63379]: INFO nova.compute.manager [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Took 27.34 seconds to build instance. [ 1411.971428] env[63379]: DEBUG oslo_vmware.api [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Task: {'id': task-1779042, 'name': ReconfigVM_Task, 'duration_secs': 0.356388} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1411.971428] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779043, 'name': Rename_Task, 'duration_secs': 0.278589} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1411.973495] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Reconfigured VM instance instance-00000006 to attach disk [datastore1] a6f7c217-a493-403d-b776-870df4575f2a/a6f7c217-a493-403d-b776-870df4575f2a.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1411.974260] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1411.974523] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-eb7e7b90-47b1-442c-b85b-da4bfa9586b6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.976200] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e350381d-dd65-4749-887f-56357b53c7f2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.985849] env[63379]: DEBUG oslo_vmware.api [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Waiting for the task: (returnval){ [ 1411.985849] env[63379]: value = "task-1779046" [ 1411.985849] env[63379]: _type = "Task" [ 1411.985849] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1411.987525] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for the task: (returnval){ [ 1411.987525] env[63379]: value = "task-1779045" [ 1411.987525] env[63379]: _type = "Task" [ 1411.987525] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.010260] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779045, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.011447] env[63379]: DEBUG oslo_vmware.api [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Task: {'id': task-1779046, 'name': Rename_Task} progress is 10%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.220091] env[63379]: DEBUG oslo_vmware.api [None req-95583623-9321-4522-be0e-578151886a4d tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Task: {'id': task-1779044, 'name': PowerOffVM_Task, 'duration_secs': 0.29659} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.220433] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-95583623-9321-4522-be0e-578151886a4d tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] [instance: ae565930-1bbc-4e75-bfc1-25dbcfd2e999] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1412.220677] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-95583623-9321-4522-be0e-578151886a4d tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] [instance: ae565930-1bbc-4e75-bfc1-25dbcfd2e999] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1412.220945] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-884301fd-b964-4ceb-b76f-1b5cb4d5dada {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.265359] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-95583623-9321-4522-be0e-578151886a4d tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] [instance: ae565930-1bbc-4e75-bfc1-25dbcfd2e999] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1412.265709] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-95583623-9321-4522-be0e-578151886a4d tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] [instance: ae565930-1bbc-4e75-bfc1-25dbcfd2e999] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1412.265921] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-95583623-9321-4522-be0e-578151886a4d tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Deleting the datastore file [datastore1] ae565930-1bbc-4e75-bfc1-25dbcfd2e999 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1412.268261] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-323ca71c-0ff2-48ac-955c-a802d784716a {{(pid=63379) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.274959] env[63379]: DEBUG oslo_vmware.api [None req-95583623-9321-4522-be0e-578151886a4d tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Waiting for the task: (returnval){ [ 1412.274959] env[63379]: value = "task-1779048" [ 1412.274959] env[63379]: _type = "Task" [ 1412.274959] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.278876] env[63379]: DEBUG oslo_concurrency.lockutils [req-d3eb7c23-50f4-4d8c-b8a9-b92fd5c6da8a req-d6cd9083-b765-4474-8f6d-6bc0edcb5691 service nova] Releasing lock "refresh_cache-d221329b-eee4-42f5-bb27-cf6af0386c04" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1412.286306] env[63379]: DEBUG oslo_vmware.api [None req-95583623-9321-4522-be0e-578151886a4d tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Task: {'id': task-1779048, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.299285] env[63379]: DEBUG oslo_concurrency.lockutils [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.657s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1412.299773] env[63379]: DEBUG nova.compute.manager [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1412.305944] env[63379]: DEBUG oslo_concurrency.lockutils [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.800s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1412.307419] env[63379]: INFO nova.compute.claims [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1412.372866] env[63379]: DEBUG oslo_vmware.api [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Task: {'id': task-1779040, 'name': PowerOnVM_Task, 'duration_secs': 1.37314} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.373658] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1412.373974] env[63379]: INFO nova.compute.manager [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Took 11.52 seconds to spawn the instance on the hypervisor. [ 1412.374266] env[63379]: DEBUG nova.compute.manager [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1412.375181] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2375df4-c31d-443a-af1d-318253e231be {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.382807] env[63379]: DEBUG oslo_vmware.api [None req-cff8885b-00c4-464d-91a5-a42d5693a8f4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779041, 'name': RemoveSnapshot_Task, 'duration_secs': 1.363132} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.386553] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cff8885b-00c4-464d-91a5-a42d5693a8f4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Deleted Snapshot of the VM instance {{(pid=63379) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1412.386553] env[63379]: INFO nova.compute.manager [None req-cff8885b-00c4-464d-91a5-a42d5693a8f4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Took 23.13 seconds to snapshot the instance on the hypervisor. [ 1412.403761] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]529e1bac-191f-6a3b-f3ad-559e0bdff60d, 'name': SearchDatastore_Task, 'duration_secs': 0.03713} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.403761] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1412.403761] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1412.403761] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1412.403761] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1412.403761] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1412.403761] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-19b60c90-3ede-43db-97ee-f9d6973ca05d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.413872] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1412.414203] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1412.414995] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3589583-ad51-4466-b79f-9e75515fa96f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.421126] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for the task: (returnval){ [ 1412.421126] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c3557b-270e-a9fe-3a12-c80ddc151198" [ 1412.421126] env[63379]: _type = "Task" [ 1412.421126] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.431884] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c3557b-270e-a9fe-3a12-c80ddc151198, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.472868] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e6b4bf37-2d37-4b0e-9dd0-6bd63ac0e3e6 tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Lock "aaaf4b06-ef84-41ba-8054-29582854a9f1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.946s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1412.502999] env[63379]: DEBUG oslo_vmware.api [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Task: {'id': task-1779046, 'name': Rename_Task, 'duration_secs': 0.160011} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.507181] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1412.507181] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779045, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.507635] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8538e719-4c14-4f89-91fd-e47a440f2554 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.515543] env[63379]: DEBUG oslo_vmware.api [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Waiting for the task: (returnval){ [ 1412.515543] env[63379]: value = "task-1779049" [ 1412.515543] env[63379]: _type = "Task" [ 1412.515543] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.525396] env[63379]: DEBUG oslo_vmware.api [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Task: {'id': task-1779049, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.622435] env[63379]: DEBUG nova.network.neutron [req-babda3d4-15a5-49f0-8f39-b7332dc924c2 req-cb19919d-2f6d-499a-8d03-42c72c3bd6d2 service nova] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Updated VIF entry in instance network info cache for port 65e3bc3b-bfed-4dd6-be59-87481a211014. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1412.623175] env[63379]: DEBUG nova.network.neutron [req-babda3d4-15a5-49f0-8f39-b7332dc924c2 req-cb19919d-2f6d-499a-8d03-42c72c3bd6d2 service nova] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Updating instance_info_cache with network_info: [{"id": "65e3bc3b-bfed-4dd6-be59-87481a211014", "address": "fa:16:3e:cf:dd:6f", "network": {"id": "66f99999-2093-485c-98b2-12d4a173be2a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1040360959-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.194", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e8ea1d9b2b194236ac9e91082b291b97", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1d25020-c621-4388-ac1d-de55bfefbe50", "external-id": "nsx-vlan-transportzone-573", "segmentation_id": 573, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap65e3bc3b-bf", "ovs_interfaceid": "65e3bc3b-bfed-4dd6-be59-87481a211014", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1412.787943] env[63379]: DEBUG oslo_vmware.api [None req-95583623-9321-4522-be0e-578151886a4d tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Task: {'id': task-1779048, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.207895} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.789404] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-95583623-9321-4522-be0e-578151886a4d tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1412.789404] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-95583623-9321-4522-be0e-578151886a4d tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] [instance: ae565930-1bbc-4e75-bfc1-25dbcfd2e999] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1412.789404] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-95583623-9321-4522-be0e-578151886a4d tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] [instance: ae565930-1bbc-4e75-bfc1-25dbcfd2e999] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1412.790055] env[63379]: INFO nova.compute.manager [None req-95583623-9321-4522-be0e-578151886a4d tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] [instance: ae565930-1bbc-4e75-bfc1-25dbcfd2e999] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1412.790485] env[63379]: DEBUG oslo.service.loopingcall [None req-95583623-9321-4522-be0e-578151886a4d tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1412.790806] env[63379]: DEBUG nova.compute.manager [-] [instance: ae565930-1bbc-4e75-bfc1-25dbcfd2e999] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1412.791717] env[63379]: DEBUG nova.network.neutron [-] [instance: ae565930-1bbc-4e75-bfc1-25dbcfd2e999] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1412.815022] env[63379]: DEBUG nova.compute.utils [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1412.816793] env[63379]: DEBUG nova.compute.manager [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1412.817144] env[63379]: DEBUG nova.network.neutron [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1412.824326] env[63379]: DEBUG nova.network.neutron [-] [instance: ae565930-1bbc-4e75-bfc1-25dbcfd2e999] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1412.908160] env[63379]: INFO nova.compute.manager [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Took 28.20 seconds to build instance. [ 1412.939166] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c3557b-270e-a9fe-3a12-c80ddc151198, 'name': SearchDatastore_Task, 'duration_secs': 0.012015} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.939963] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38bb90a0-841b-4614-bf56-c130b6e6eea8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.946555] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for the task: (returnval){ [ 1412.946555] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d69ecb-e745-d0db-0950-fa305c188256" [ 1412.946555] env[63379]: _type = "Task" [ 1412.946555] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.955972] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d69ecb-e745-d0db-0950-fa305c188256, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.969211] env[63379]: DEBUG nova.policy [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3dfe4591337c4edaa169743fcdf0c4c0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0133567c115648aa9f6ee1c5adee833b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1412.974462] env[63379]: DEBUG nova.compute.manager [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1413.005740] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779045, 'name': PowerOnVM_Task, 'duration_secs': 0.726284} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.006313] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1413.006633] env[63379]: INFO nova.compute.manager [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Took 11.41 seconds to spawn the instance on the hypervisor. [ 1413.006906] env[63379]: DEBUG nova.compute.manager [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1413.007996] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ed4d2ed-afd8-4563-a3cb-13845e85fe2d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.028519] env[63379]: DEBUG oslo_vmware.api [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Task: {'id': task-1779049, 'name': PowerOnVM_Task, 'duration_secs': 0.492225} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.029624] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1413.029846] env[63379]: DEBUG nova.compute.manager [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1413.030929] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a97fd8c-945e-4582-96d1-d1165fa911a4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.129332] env[63379]: DEBUG oslo_concurrency.lockutils [req-babda3d4-15a5-49f0-8f39-b7332dc924c2 req-cb19919d-2f6d-499a-8d03-42c72c3bd6d2 service nova] Releasing lock "refresh_cache-25090d85-cd10-44fc-aa9d-071ada14f249" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1413.317624] env[63379]: DEBUG nova.compute.manager [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1413.326803] env[63379]: DEBUG nova.network.neutron [-] [instance: ae565930-1bbc-4e75-bfc1-25dbcfd2e999] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1413.411870] env[63379]: DEBUG oslo_concurrency.lockutils [None req-280f42dc-c22b-4a76-bd85-df3193721432 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Lock "15d19ce3-ea71-47ff-a738-9ba00b8dfcf1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.340s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1413.463950] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d69ecb-e745-d0db-0950-fa305c188256, 'name': SearchDatastore_Task, 'duration_secs': 0.015346} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.463950] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1413.463950] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] d221329b-eee4-42f5-bb27-cf6af0386c04/d221329b-eee4-42f5-bb27-cf6af0386c04.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1413.463950] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-11875d6d-0ae3-4111-9f21-f20870939385 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.472703] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for the task: (returnval){ [ 1413.472703] env[63379]: value = "task-1779050" [ 1413.472703] env[63379]: _type = "Task" [ 1413.472703] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.486324] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779050, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.501866] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1413.535565] env[63379]: INFO nova.compute.manager [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Took 26.43 seconds to build instance. 
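The records above repeat one pattern: a vCenter task is created (PowerOnVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task), the caller logs "Waiting for the task ... to complete", polls its progress ("progress is N%"), and finally logs "completed successfully" together with a duration_secs. The following is a minimal sketch of that poll loop, assuming a hypothetical fetch_task_info callable and TaskInfo shape; it is an illustration of the pattern, not the actual oslo.vmware or pyVmomi API.

# Minimal sketch of the "Waiting for the task ... to complete" pattern seen above.
# fetch_task_info() and TaskInfo are hypothetical stand-ins for whatever client
# call returns a vSphere task's state and progress; they are NOT the real
# oslo.vmware/pyVmomi signatures.
import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    state: str           # "queued" | "running" | "success" | "error"
    progress: int        # 0-100, as in the "progress is N%" records
    error: str | None = None

def wait_for_task(task_ref, fetch_task_info, poll_interval=0.5, log=print):
    """Poll a task until it reaches a terminal state, mirroring the log output."""
    log(f"Waiting for the task: {task_ref} to complete.")
    started = time.monotonic()
    while True:
        info = fetch_task_info(task_ref)
        if info.state in ("queued", "running"):
            log(f"Task: {task_ref} progress is {info.progress}%.")
            time.sleep(poll_interval)
            continue
        duration = time.monotonic() - started
        if info.state == "success":
            log(f"Task: {task_ref}, duration_secs: {duration:.6f}, completed successfully.")
            return info
        raise RuntimeError(f"Task {task_ref} failed after {duration:.2f}s: {info.error}")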
[ 1413.548606] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1413.613606] env[63379]: DEBUG nova.network.neutron [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Successfully updated port: a524aedc-254a-4394-836b-4136823591d8 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1413.644722] env[63379]: DEBUG nova.network.neutron [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Successfully created port: 55f75417-a04f-44de-a21a-20527e069280 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1413.753965] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af5e62c0-fcbe-4f32-99d2-5cf0e6ba371d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.765964] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e6e6a0a-a415-4715-b3e6-e1a299e6b781 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.808142] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d05bafb6-0257-4cfb-ad47-d7f96dca015c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.818705] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fecc653f-d3ed-454f-a51d-1f7d7d82b592 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.837525] env[63379]: INFO nova.compute.manager [-] [instance: ae565930-1bbc-4e75-bfc1-25dbcfd2e999] Took 1.05 seconds to deallocate network for instance. [ 1413.839042] env[63379]: DEBUG nova.compute.provider_tree [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1413.917349] env[63379]: DEBUG nova.compute.manager [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1413.989567] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779050, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.037826] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Lock "571bb238-9cf3-475e-b596-a9609acc8696" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.776s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1414.115333] env[63379]: DEBUG oslo_concurrency.lockutils [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Acquiring lock "refresh_cache-6b4e80fc-582f-432b-aa99-ec133127578e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1414.115550] env[63379]: DEBUG oslo_concurrency.lockutils [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Acquired lock "refresh_cache-6b4e80fc-582f-432b-aa99-ec133127578e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1414.115794] env[63379]: DEBUG nova.network.neutron [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1414.191204] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Acquiring lock "08465a2c-1ab6-4c53-9b12-3cd51c717b03" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1414.191664] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Lock "08465a2c-1ab6-4c53-9b12-3cd51c717b03" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1414.266275] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b4fba400-c0db-485d-8309-ba424930d6ef tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquiring lock "0aab61e4-c055-4872-973a-20fa6802ec10" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1414.266704] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b4fba400-c0db-485d-8309-ba424930d6ef tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "0aab61e4-c055-4872-973a-20fa6802ec10" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} 
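The lockutils records just above show named locks being acquired and released with "waited" and "held" timings: the per-instance build lock "571bb238-..." is released after 30.776s, and a new build acquires "08465a2c-..." having waited 0.000s. A simplified sketch of that named-lock pattern follows, assuming an in-process registry of threading.Lock objects; it only illustrates the idea and is not oslo_concurrency.lockutils itself.

# Simplified sketch of the 'Acquiring lock "..." / acquired ... waited Ns /
# "released" ... held Ns' pattern in the records above. Illustration only,
# not the oslo_concurrency.lockutils implementation.
import threading
import time
from collections import defaultdict
from contextlib import contextmanager

_locks = defaultdict(threading.Lock)   # one lock object per lock name
_registry_guard = threading.Lock()     # protects the registry itself

@contextmanager
def named_lock(name, owner, log=print):
    with _registry_guard:
        lock = _locks[name]
    log(f'Acquiring lock "{name}" by "{owner}"')
    wait_start = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - wait_start
    log(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
    held_start = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - held_start
        log(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')

# Usage mirroring the build path in the log: one build per instance UUID at a time.
# with named_lock("08465a2c-1ab6-4c53-9b12-3cd51c717b03",
#                 "ComputeManager.build_and_run_instance"):
#     ...build and run the instance...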
[ 1414.266990] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b4fba400-c0db-485d-8309-ba424930d6ef tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquiring lock "0aab61e4-c055-4872-973a-20fa6802ec10-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1414.267217] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b4fba400-c0db-485d-8309-ba424930d6ef tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "0aab61e4-c055-4872-973a-20fa6802ec10-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1414.267458] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b4fba400-c0db-485d-8309-ba424930d6ef tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "0aab61e4-c055-4872-973a-20fa6802ec10-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1414.270663] env[63379]: INFO nova.compute.manager [None req-b4fba400-c0db-485d-8309-ba424930d6ef tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Terminating instance [ 1414.273814] env[63379]: DEBUG nova.compute.manager [None req-b4fba400-c0db-485d-8309-ba424930d6ef tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1414.274090] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b4fba400-c0db-485d-8309-ba424930d6ef tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1414.275574] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91a37171-57da-43ce-8cba-32e6e4a3e186 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.286626] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b4fba400-c0db-485d-8309-ba424930d6ef tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1414.287083] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-28d60002-67d4-4b40-a9b8-fb875fa2f15b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.341922] env[63379]: DEBUG nova.compute.manager [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1414.347876] env[63379]: DEBUG nova.scheduler.client.report [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1414.352206] env[63379]: DEBUG nova.compute.manager [req-26489a6c-ab63-4660-bd68-fc0134a33fc7 req-63c62229-a686-439f-8252-09a6aca9fb3d service nova] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Received event network-vif-plugged-a524aedc-254a-4394-836b-4136823591d8 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1414.355033] env[63379]: DEBUG oslo_concurrency.lockutils [req-26489a6c-ab63-4660-bd68-fc0134a33fc7 req-63c62229-a686-439f-8252-09a6aca9fb3d service nova] Acquiring lock "6b4e80fc-582f-432b-aa99-ec133127578e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1414.355033] env[63379]: DEBUG oslo_concurrency.lockutils [req-26489a6c-ab63-4660-bd68-fc0134a33fc7 req-63c62229-a686-439f-8252-09a6aca9fb3d service nova] Lock "6b4e80fc-582f-432b-aa99-ec133127578e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1414.355033] env[63379]: DEBUG oslo_concurrency.lockutils [req-26489a6c-ab63-4660-bd68-fc0134a33fc7 req-63c62229-a686-439f-8252-09a6aca9fb3d service nova] Lock "6b4e80fc-582f-432b-aa99-ec133127578e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1414.355033] env[63379]: DEBUG nova.compute.manager [req-26489a6c-ab63-4660-bd68-fc0134a33fc7 req-63c62229-a686-439f-8252-09a6aca9fb3d service nova] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] No waiting events found dispatching network-vif-plugged-a524aedc-254a-4394-836b-4136823591d8 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1414.355033] env[63379]: WARNING nova.compute.manager [req-26489a6c-ab63-4660-bd68-fc0134a33fc7 req-63c62229-a686-439f-8252-09a6aca9fb3d service nova] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Received unexpected event network-vif-plugged-a524aedc-254a-4394-836b-4136823591d8 for instance with vm_state building and task_state spawning. 
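The preceding records show the external-event handshake for instance 6b4e80fc-...: Neutron reports network-vif-plugged-a524aedc-..., the compute manager briefly takes the "<instance>-events" lock, finds "No waiting events found dispatching ..." and warns about an unexpected event because the instance is still building/spawning. The sketch below illustrates that register-then-dispatch pattern using threading.Event objects keyed by (instance, event name); the class and method names are hypothetical, not Nova's actual InstanceEvents implementation.

# Illustrative sketch of the external-event handshake seen above: a spawning
# thread may register interest in "network-vif-plugged-<port_id>", and the
# event handler pops and signals that waiter; if nobody registered, the event
# is logged as unexpected. Hypothetical helper, not Nova's InstanceEvents class.
import threading

class InstanceEventWaiters:
    def __init__(self, log=print):
        self._lock = threading.Lock()
        self._waiters = {}          # (instance_uuid, event_name) -> threading.Event
        self._log = log

    def prepare(self, instance_uuid, event_name):
        """Called by the spawn path before plugging the VIF."""
        ev = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = ev
        return ev                    # caller later does ev.wait(timeout=...)

    def dispatch(self, instance_uuid, event_name, vm_state, task_state):
        """Called when the network service reports e.g. network-vif-plugged-<port_id>."""
        with self._lock:
            ev = self._waiters.pop((instance_uuid, event_name), None)
        if ev is None:
            self._log(f"No waiting events found dispatching {event_name}")
            self._log(f"Received unexpected event {event_name} for instance "
                      f"with vm_state {vm_state} and task_state {task_state}.")
            return False
        ev.set()
        return True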
[ 1414.355033] env[63379]: DEBUG oslo_concurrency.lockutils [None req-95583623-9321-4522-be0e-578151886a4d tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1414.384251] env[63379]: DEBUG nova.virt.hardware [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1414.384645] env[63379]: DEBUG nova.virt.hardware [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1414.384851] env[63379]: DEBUG nova.virt.hardware [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1414.385180] env[63379]: DEBUG nova.virt.hardware [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1414.385442] env[63379]: DEBUG nova.virt.hardware [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1414.385620] env[63379]: DEBUG nova.virt.hardware [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1414.385972] env[63379]: DEBUG nova.virt.hardware [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1414.386256] env[63379]: DEBUG nova.virt.hardware [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1414.386563] env[63379]: DEBUG nova.virt.hardware [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1414.386906] env[63379]: DEBUG nova.virt.hardware [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1414.387059] env[63379]: DEBUG nova.virt.hardware [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1414.389366] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78c22dc7-3792-44ab-9df4-0a10e5ab116a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.396194] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b4fba400-c0db-485d-8309-ba424930d6ef tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1414.396412] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b4fba400-c0db-485d-8309-ba424930d6ef tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1414.396594] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4fba400-c0db-485d-8309-ba424930d6ef tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Deleting the datastore file [datastore1] 0aab61e4-c055-4872-973a-20fa6802ec10 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1414.397337] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-27f61af2-68f0-4db9-85da-71002dfe06cd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.403605] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efae2b82-51b6-46f3-9435-457b88e0cec6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.409171] env[63379]: DEBUG oslo_vmware.api [None req-b4fba400-c0db-485d-8309-ba424930d6ef tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1414.409171] env[63379]: value = 
"task-1779052" [ 1414.409171] env[63379]: _type = "Task" [ 1414.409171] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.429981] env[63379]: DEBUG oslo_vmware.api [None req-b4fba400-c0db-485d-8309-ba424930d6ef tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779052, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.447590] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1414.485590] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779050, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.668906} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.485875] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] d221329b-eee4-42f5-bb27-cf6af0386c04/d221329b-eee4-42f5-bb27-cf6af0386c04.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1414.486103] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1414.486393] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9c0e76bc-ac5e-4852-b918-d1c6ea7a93c5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.495499] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for the task: (returnval){ [ 1414.495499] env[63379]: value = "task-1779053" [ 1414.495499] env[63379]: _type = "Task" [ 1414.495499] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.504541] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779053, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.694695] env[63379]: DEBUG nova.compute.manager [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1414.710518] env[63379]: DEBUG nova.network.neutron [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1414.856629] env[63379]: DEBUG oslo_concurrency.lockutils [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.551s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1414.857141] env[63379]: DEBUG nova.compute.manager [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1414.860735] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 17.310s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1414.860985] env[63379]: DEBUG nova.objects.instance [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63379) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1414.923485] env[63379]: DEBUG oslo_vmware.api [None req-b4fba400-c0db-485d-8309-ba424930d6ef tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779052, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.335565} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.924652] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4fba400-c0db-485d-8309-ba424930d6ef tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1414.924910] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b4fba400-c0db-485d-8309-ba424930d6ef tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1414.925173] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b4fba400-c0db-485d-8309-ba424930d6ef tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1414.925390] env[63379]: INFO nova.compute.manager [None req-b4fba400-c0db-485d-8309-ba424930d6ef tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Took 0.65 seconds to destroy the instance on the hypervisor. [ 1414.925637] env[63379]: DEBUG oslo.service.loopingcall [None req-b4fba400-c0db-485d-8309-ba424930d6ef tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1414.929398] env[63379]: DEBUG nova.compute.manager [-] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1414.929398] env[63379]: DEBUG nova.network.neutron [-] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1415.009155] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779053, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067604} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.009454] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1415.010514] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61cba2b5-2cf0-47f3-8291-8d9fc2103161 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.039698] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Reconfiguring VM instance instance-00000012 to attach disk [datastore1] d221329b-eee4-42f5-bb27-cf6af0386c04/d221329b-eee4-42f5-bb27-cf6af0386c04.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1415.041273] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-44a4cbca-1372-40b0-ad69-1c67279d4b90 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.065266] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for the task: (returnval){ [ 1415.065266] env[63379]: value = "task-1779054" [ 1415.065266] env[63379]: _type = "Task" [ 1415.065266] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.078309] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779054, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.118620] env[63379]: DEBUG nova.network.neutron [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Updating instance_info_cache with network_info: [{"id": "a524aedc-254a-4394-836b-4136823591d8", "address": "fa:16:3e:63:fe:72", "network": {"id": "55f3848c-4d4f-4c83-a3e6-bc7a6f7af3ce", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.77", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eb95d75934bc4912a35f709406a98a65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa524aedc-25", "ovs_interfaceid": "a524aedc-254a-4394-836b-4136823591d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1415.225124] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1415.372906] env[63379]: DEBUG nova.compute.utils [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1415.374628] env[63379]: DEBUG nova.compute.manager [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1415.374888] env[63379]: DEBUG nova.network.neutron [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1415.432665] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e19ea006-6e94-42e0-9238-71b6ae829c2e tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Acquiring lock "a6f7c217-a493-403d-b776-870df4575f2a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1415.432888] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e19ea006-6e94-42e0-9238-71b6ae829c2e tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Lock "a6f7c217-a493-403d-b776-870df4575f2a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1415.433164] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e19ea006-6e94-42e0-9238-71b6ae829c2e tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Acquiring lock "a6f7c217-a493-403d-b776-870df4575f2a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1415.433468] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e19ea006-6e94-42e0-9238-71b6ae829c2e tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Lock "a6f7c217-a493-403d-b776-870df4575f2a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1415.434342] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e19ea006-6e94-42e0-9238-71b6ae829c2e tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Lock "a6f7c217-a493-403d-b776-870df4575f2a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1415.436839] env[63379]: INFO nova.compute.manager [None req-e19ea006-6e94-42e0-9238-71b6ae829c2e tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Terminating instance [ 1415.438497] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e19ea006-6e94-42e0-9238-71b6ae829c2e tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Acquiring lock "refresh_cache-a6f7c217-a493-403d-b776-870df4575f2a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1415.439144] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e19ea006-6e94-42e0-9238-71b6ae829c2e tempest-ServersAdmin275Test-1762012693 
tempest-ServersAdmin275Test-1762012693-project-member] Acquired lock "refresh_cache-a6f7c217-a493-403d-b776-870df4575f2a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1415.439144] env[63379]: DEBUG nova.network.neutron [None req-e19ea006-6e94-42e0-9238-71b6ae829c2e tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1415.503082] env[63379]: DEBUG nova.policy [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '09e9b61d30e14cdaa8ddfbde0641622e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6e0420b33d0e4236b9750b79d94e57af', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1415.581383] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779054, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.621995] env[63379]: DEBUG oslo_concurrency.lockutils [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Releasing lock "refresh_cache-6b4e80fc-582f-432b-aa99-ec133127578e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1415.621995] env[63379]: DEBUG nova.compute.manager [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Instance network_info: |[{"id": "a524aedc-254a-4394-836b-4136823591d8", "address": "fa:16:3e:63:fe:72", "network": {"id": "55f3848c-4d4f-4c83-a3e6-bc7a6f7af3ce", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.77", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eb95d75934bc4912a35f709406a98a65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa524aedc-25", "ovs_interfaceid": "a524aedc-254a-4394-836b-4136823591d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1415.622586] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:63:fe:72', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea00b53a-9c9b-4592-ab95-7e10473f338d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a524aedc-254a-4394-836b-4136823591d8', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1415.631014] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Creating folder: Project (62dd639e7cfb49ce85b2950d8191c024). Parent ref: group-v369214. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1415.631344] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f444ddde-08a6-4f27-abcf-40760539896f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.646332] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Created folder: Project (62dd639e7cfb49ce85b2950d8191c024) in parent group-v369214. [ 1415.646661] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Creating folder: Instances. Parent ref: group-v369270. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1415.646828] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-002067fb-b791-4d09-8bb3-85b137cbb3a4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.660838] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Created folder: Instances in parent group-v369270. [ 1415.661163] env[63379]: DEBUG oslo.service.loopingcall [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1415.661467] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1415.661701] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-96211394-3bdb-4a3d-aad0-5c10b26d5e2c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.687508] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1415.687508] env[63379]: value = "task-1779057" [ 1415.687508] env[63379]: _type = "Task" [ 1415.687508] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.697800] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779057, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.876327] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a071efaf-4413-4267-96c0-0167a1df9310 tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1415.877705] env[63379]: DEBUG oslo_concurrency.lockutils [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 18.312s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1415.880388] env[63379]: DEBUG nova.compute.manager [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1416.006011] env[63379]: DEBUG nova.network.neutron [None req-e19ea006-6e94-42e0-9238-71b6ae829c2e tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1416.080348] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779054, 'name': ReconfigVM_Task, 'duration_secs': 0.685423} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1416.080591] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Reconfigured VM instance instance-00000012 to attach disk [datastore1] d221329b-eee4-42f5-bb27-cf6af0386c04/d221329b-eee4-42f5-bb27-cf6af0386c04.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1416.081402] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-345d94a9-8f3b-4756-8406-3991e606435e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.089683] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for the task: (returnval){ [ 1416.089683] env[63379]: value = "task-1779058" [ 1416.089683] env[63379]: _type = "Task" [ 1416.089683] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1416.102321] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779058, 'name': Rename_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.200720] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779057, 'name': CreateVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.261966] env[63379]: DEBUG nova.network.neutron [None req-e19ea006-6e94-42e0-9238-71b6ae829c2e tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1416.387429] env[63379]: INFO nova.compute.claims [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1416.605501] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779058, 'name': Rename_Task, 'duration_secs': 0.323832} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1416.606224] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1416.606586] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-87286ece-e77c-4da4-a639-0b6cabf1df85 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.616028] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for the task: (returnval){ [ 1416.616028] env[63379]: value = "task-1779059" [ 1416.616028] env[63379]: _type = "Task" [ 1416.616028] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1416.626494] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779059, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.701446] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779057, 'name': CreateVM_Task, 'duration_secs': 0.607928} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1416.702920] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1416.704175] env[63379]: DEBUG oslo_concurrency.lockutils [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1416.704267] env[63379]: DEBUG oslo_concurrency.lockutils [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1416.704588] env[63379]: DEBUG oslo_concurrency.lockutils [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1416.704917] env[63379]: DEBUG nova.network.neutron [-] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1416.706229] env[63379]: DEBUG 
oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f82f6ba-dac4-4cee-a501-5107d86d7680 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.713037] env[63379]: DEBUG oslo_vmware.api [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Waiting for the task: (returnval){ [ 1416.713037] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d43290-4511-94b6-ea16-e2429d56f45a" [ 1416.713037] env[63379]: _type = "Task" [ 1416.713037] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1416.727188] env[63379]: DEBUG oslo_vmware.api [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d43290-4511-94b6-ea16-e2429d56f45a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.765542] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e19ea006-6e94-42e0-9238-71b6ae829c2e tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Releasing lock "refresh_cache-a6f7c217-a493-403d-b776-870df4575f2a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1416.766265] env[63379]: DEBUG nova.compute.manager [None req-e19ea006-6e94-42e0-9238-71b6ae829c2e tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1416.766757] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e19ea006-6e94-42e0-9238-71b6ae829c2e tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1416.768083] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-687214a7-949d-45d0-a005-daf8c6a6c4d4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.779437] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e19ea006-6e94-42e0-9238-71b6ae829c2e tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1416.779776] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bc8a3760-93b2-4c05-88b8-8040ab407a68 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.790709] env[63379]: DEBUG oslo_vmware.api [None req-e19ea006-6e94-42e0-9238-71b6ae829c2e tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Waiting for the task: (returnval){ [ 1416.790709] env[63379]: value = "task-1779060" [ 1416.790709] env[63379]: _type = "Task" [ 1416.790709] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1416.796043] env[63379]: DEBUG nova.network.neutron [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Successfully created port: c6139085-d9e8-416c-8a48-9c9e3c07eed1 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1416.805153] env[63379]: DEBUG oslo_vmware.api [None req-e19ea006-6e94-42e0-9238-71b6ae829c2e tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Task: {'id': task-1779060, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.903626] env[63379]: INFO nova.compute.resource_tracker [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Updating resource usage from migration 786df903-0cb0-4f51-a75d-824fa35e0a15 [ 1416.908517] env[63379]: DEBUG nova.compute.manager [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1416.955314] env[63379]: DEBUG nova.virt.hardware [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1416.955314] env[63379]: DEBUG nova.virt.hardware [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1416.955577] env[63379]: DEBUG nova.virt.hardware [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1416.956273] env[63379]: DEBUG nova.virt.hardware [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1416.956622] env[63379]: DEBUG nova.virt.hardware [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1416.957187] env[63379]: DEBUG nova.virt.hardware [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1416.958028] env[63379]: DEBUG nova.virt.hardware [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1416.958028] env[63379]: DEBUG nova.virt.hardware [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Build topologies for 1 
vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1416.958028] env[63379]: DEBUG nova.virt.hardware [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1416.958430] env[63379]: DEBUG nova.virt.hardware [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1416.958765] env[63379]: DEBUG nova.virt.hardware [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1416.960606] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccf3d79e-c0b5-48df-b7db-92a927547822 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.973869] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5284ab6-9f85-4fc4-ba29-6bb9554e57ee {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.004521] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquiring lock "48c0d20e-adc4-40a9-888c-ffea363f6edb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1417.005013] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "48c0d20e-adc4-40a9-888c-ffea363f6edb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1417.082177] env[63379]: DEBUG nova.network.neutron [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Successfully updated port: 55f75417-a04f-44de-a21a-20527e069280 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1417.130619] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779059, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.208755] env[63379]: INFO nova.compute.manager [-] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Took 2.28 seconds to deallocate network for instance. [ 1417.228024] env[63379]: DEBUG oslo_vmware.api [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d43290-4511-94b6-ea16-e2429d56f45a, 'name': SearchDatastore_Task, 'duration_secs': 0.019143} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.228024] env[63379]: DEBUG oslo_concurrency.lockutils [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1417.228024] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1417.228024] env[63379]: DEBUG oslo_concurrency.lockutils [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1417.228024] env[63379]: DEBUG oslo_concurrency.lockutils [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1417.228024] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1417.228024] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f7b9c02f-6208-4d16-8b98-450cff001aa5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.243094] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1417.243094] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 
tempest-ServerExternalEventsTest-416951114-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1417.243611] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb413bf6-0ab6-4ea6-be8d-8caa9983ef9e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.261538] env[63379]: DEBUG oslo_vmware.api [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Waiting for the task: (returnval){ [ 1417.261538] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]520a575b-eb90-1287-bc38-80200ba570c3" [ 1417.261538] env[63379]: _type = "Task" [ 1417.261538] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.279385] env[63379]: DEBUG oslo_vmware.api [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]520a575b-eb90-1287-bc38-80200ba570c3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.306343] env[63379]: DEBUG oslo_vmware.api [None req-e19ea006-6e94-42e0-9238-71b6ae829c2e tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Task: {'id': task-1779060, 'name': PowerOffVM_Task, 'duration_secs': 0.153335} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.313995] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e19ea006-6e94-42e0-9238-71b6ae829c2e tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1417.314260] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e19ea006-6e94-42e0-9238-71b6ae829c2e tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1417.316067] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9509de97-852b-4f57-ac96-4c8e0ba9d20d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.356703] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e19ea006-6e94-42e0-9238-71b6ae829c2e tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1417.356941] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e19ea006-6e94-42e0-9238-71b6ae829c2e tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1417.357134] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-e19ea006-6e94-42e0-9238-71b6ae829c2e tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Deleting the datastore file [datastore1] a6f7c217-a493-403d-b776-870df4575f2a {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1417.357447] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d932f30f-aa3f-474f-ae00-0eb469a80030 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.366050] env[63379]: DEBUG oslo_vmware.api [None req-e19ea006-6e94-42e0-9238-71b6ae829c2e tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Waiting for the task: (returnval){ [ 1417.366050] env[63379]: value = "task-1779062" [ 1417.366050] env[63379]: _type = "Task" [ 1417.366050] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.377842] env[63379]: DEBUG oslo_vmware.api [None req-e19ea006-6e94-42e0-9238-71b6ae829c2e tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Task: {'id': task-1779062, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.510218] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e272806b-a61c-4c06-bc01-0246eb478768 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.519703] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c474bd22-8774-4fab-850b-bfef0ea3ef46 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.559962] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e5b25fb-3e09-4fa0-8f42-5ff6a6b3fc20 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.574021] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bd77654-4557-4484-a8ed-adbaa0eebb69 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.588163] env[63379]: DEBUG oslo_concurrency.lockutils [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Acquiring lock "refresh_cache-de671ba9-0d86-4f89-a6bd-ecea9ad0ba85" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1417.589614] env[63379]: DEBUG oslo_concurrency.lockutils [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Acquired lock "refresh_cache-de671ba9-0d86-4f89-a6bd-ecea9ad0ba85" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1417.589614] env[63379]: DEBUG nova.network.neutron [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 
tempest-InstanceActionsV221TestJSON-186298048-project-member] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1417.592238] env[63379]: DEBUG nova.compute.provider_tree [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1417.626428] env[63379]: DEBUG oslo_vmware.api [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779059, 'name': PowerOnVM_Task, 'duration_secs': 0.617115} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.627283] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1417.627283] env[63379]: INFO nova.compute.manager [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Took 11.34 seconds to spawn the instance on the hypervisor. [ 1417.627433] env[63379]: DEBUG nova.compute.manager [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1417.628210] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d6a1774-9c76-48e5-ad74-2d348b4816d1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.676506] env[63379]: DEBUG nova.compute.manager [req-7a1fc827-2b96-4105-ae20-cbea9adfed1e req-034f9140-f479-4f32-823d-977d0c4e42f9 service nova] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Received event network-changed-6b70ec9a-65bb-4a1c-9312-97031fc4fc46 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1417.676624] env[63379]: DEBUG nova.compute.manager [req-7a1fc827-2b96-4105-ae20-cbea9adfed1e req-034f9140-f479-4f32-823d-977d0c4e42f9 service nova] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Refreshing instance network info cache due to event network-changed-6b70ec9a-65bb-4a1c-9312-97031fc4fc46. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1417.676853] env[63379]: DEBUG oslo_concurrency.lockutils [req-7a1fc827-2b96-4105-ae20-cbea9adfed1e req-034f9140-f479-4f32-823d-977d0c4e42f9 service nova] Acquiring lock "refresh_cache-15d19ce3-ea71-47ff-a738-9ba00b8dfcf1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1417.677035] env[63379]: DEBUG oslo_concurrency.lockutils [req-7a1fc827-2b96-4105-ae20-cbea9adfed1e req-034f9140-f479-4f32-823d-977d0c4e42f9 service nova] Acquired lock "refresh_cache-15d19ce3-ea71-47ff-a738-9ba00b8dfcf1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1417.677232] env[63379]: DEBUG nova.network.neutron [req-7a1fc827-2b96-4105-ae20-cbea9adfed1e req-034f9140-f479-4f32-823d-977d0c4e42f9 service nova] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Refreshing network info cache for port 6b70ec9a-65bb-4a1c-9312-97031fc4fc46 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1417.723535] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b4fba400-c0db-485d-8309-ba424930d6ef tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1417.776069] env[63379]: DEBUG oslo_vmware.api [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]520a575b-eb90-1287-bc38-80200ba570c3, 'name': SearchDatastore_Task, 'duration_secs': 0.028336} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.776603] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f415049-7f15-444a-9ca9-157e908a0f8d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.785908] env[63379]: DEBUG oslo_vmware.api [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Waiting for the task: (returnval){ [ 1417.785908] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52fd5a1b-b57a-a9af-642e-8838f1236d9e" [ 1417.785908] env[63379]: _type = "Task" [ 1417.785908] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.797061] env[63379]: DEBUG oslo_vmware.api [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52fd5a1b-b57a-a9af-642e-8838f1236d9e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.879900] env[63379]: DEBUG oslo_vmware.api [None req-e19ea006-6e94-42e0-9238-71b6ae829c2e tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Task: {'id': task-1779062, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.400551} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.881995] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-e19ea006-6e94-42e0-9238-71b6ae829c2e tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1417.881995] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e19ea006-6e94-42e0-9238-71b6ae829c2e tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1417.881995] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e19ea006-6e94-42e0-9238-71b6ae829c2e tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1417.881995] env[63379]: INFO nova.compute.manager [None req-e19ea006-6e94-42e0-9238-71b6ae829c2e tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1417.883029] env[63379]: DEBUG oslo.service.loopingcall [None req-e19ea006-6e94-42e0-9238-71b6ae829c2e tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1417.883029] env[63379]: DEBUG nova.compute.manager [-] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1417.883029] env[63379]: DEBUG nova.network.neutron [-] [instance: a6f7c217-a493-403d-b776-870df4575f2a] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1417.910095] env[63379]: DEBUG nova.network.neutron [-] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Instance cache missing network info. 
{{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1418.100138] env[63379]: DEBUG nova.scheduler.client.report [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1418.149837] env[63379]: INFO nova.compute.manager [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Took 24.42 seconds to build instance. [ 1418.168501] env[63379]: DEBUG nova.network.neutron [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1418.235211] env[63379]: DEBUG nova.compute.manager [req-49838013-ac9e-4191-8fa4-9f1e6fbdd9b6 req-65fd5b44-35d5-454a-8c56-1df471404adc service nova] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Received event network-changed-a524aedc-254a-4394-836b-4136823591d8 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1418.235743] env[63379]: DEBUG nova.compute.manager [req-49838013-ac9e-4191-8fa4-9f1e6fbdd9b6 req-65fd5b44-35d5-454a-8c56-1df471404adc service nova] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Refreshing instance network info cache due to event network-changed-a524aedc-254a-4394-836b-4136823591d8. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1418.236218] env[63379]: DEBUG oslo_concurrency.lockutils [req-49838013-ac9e-4191-8fa4-9f1e6fbdd9b6 req-65fd5b44-35d5-454a-8c56-1df471404adc service nova] Acquiring lock "refresh_cache-6b4e80fc-582f-432b-aa99-ec133127578e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1418.236467] env[63379]: DEBUG oslo_concurrency.lockutils [req-49838013-ac9e-4191-8fa4-9f1e6fbdd9b6 req-65fd5b44-35d5-454a-8c56-1df471404adc service nova] Acquired lock "refresh_cache-6b4e80fc-582f-432b-aa99-ec133127578e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1418.236694] env[63379]: DEBUG nova.network.neutron [req-49838013-ac9e-4191-8fa4-9f1e6fbdd9b6 req-65fd5b44-35d5-454a-8c56-1df471404adc service nova] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Refreshing network info cache for port a524aedc-254a-4394-836b-4136823591d8 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1418.299216] env[63379]: DEBUG oslo_vmware.api [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52fd5a1b-b57a-a9af-642e-8838f1236d9e, 'name': SearchDatastore_Task, 'duration_secs': 0.024009} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.303255] env[63379]: DEBUG oslo_concurrency.lockutils [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1418.303255] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 6b4e80fc-582f-432b-aa99-ec133127578e/6b4e80fc-582f-432b-aa99-ec133127578e.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1418.303255] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d9535cfa-a243-47c1-a291-04261e91d2d7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.312956] env[63379]: DEBUG oslo_vmware.api [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Waiting for the task: (returnval){ [ 1418.312956] env[63379]: value = "task-1779063" [ 1418.312956] env[63379]: _type = "Task" [ 1418.312956] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.328134] env[63379]: DEBUG oslo_vmware.api [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Task: {'id': task-1779063, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.414264] env[63379]: DEBUG nova.network.neutron [-] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1418.605691] env[63379]: DEBUG oslo_concurrency.lockutils [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.728s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1418.606367] env[63379]: INFO nova.compute.manager [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Migrating [ 1418.606796] env[63379]: DEBUG oslo_concurrency.lockutils [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1418.607133] env[63379]: DEBUG oslo_concurrency.lockutils [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Acquired lock "compute-rpcapi-router" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1418.611752] env[63379]: DEBUG oslo_concurrency.lockutils [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.710s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1418.614129] env[63379]: INFO nova.compute.claims [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1418.629120] env[63379]: DEBUG nova.network.neutron [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Updating instance_info_cache with network_info: [{"id": "55f75417-a04f-44de-a21a-20527e069280", "address": "fa:16:3e:e7:e3:19", "network": {"id": "8553636b-4f88-41bf-87fb-69801f1866ea", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-28532258-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", 
"type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0133567c115648aa9f6ee1c5adee833b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4399275-8e92-4448-be9e-d4984e93e89c", "external-id": "nsx-vlan-transportzone-192", "segmentation_id": 192, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55f75417-a0", "ovs_interfaceid": "55f75417-a04f-44de-a21a-20527e069280", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1418.651996] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7908b237-562e-4bcc-9a8e-503620341e27 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Lock "d221329b-eee4-42f5-bb27-cf6af0386c04" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.349s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1418.830840] env[63379]: DEBUG oslo_vmware.api [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Task: {'id': task-1779063, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.909939] env[63379]: DEBUG nova.network.neutron [req-7a1fc827-2b96-4105-ae20-cbea9adfed1e req-034f9140-f479-4f32-823d-977d0c4e42f9 service nova] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Updated VIF entry in instance network info cache for port 6b70ec9a-65bb-4a1c-9312-97031fc4fc46. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1418.909939] env[63379]: DEBUG nova.network.neutron [req-7a1fc827-2b96-4105-ae20-cbea9adfed1e req-034f9140-f479-4f32-823d-977d0c4e42f9 service nova] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Updating instance_info_cache with network_info: [{"id": "6b70ec9a-65bb-4a1c-9312-97031fc4fc46", "address": "fa:16:3e:8f:e4:09", "network": {"id": "5cdac896-d067-4c9e-9ccd-954fce726e11", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-637047934-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e0e95e0ca1804616a5d258396749d295", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e", "external-id": "nsx-vlan-transportzone-988", "segmentation_id": 988, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b70ec9a-65", "ovs_interfaceid": "6b70ec9a-65bb-4a1c-9312-97031fc4fc46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1418.917922] env[63379]: INFO nova.compute.manager [-] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Took 1.03 seconds to deallocate network for instance. 
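The entries above trace the recurring driver pattern in this run: the image-cache path for d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 is locked, the datastore is searched, the cached vmdk is copied to the instance directory, and every vCenter operation (SearchDatastore_Task, PowerOffVM_Task, DeleteDatastoreFile_Task, CopyVirtualDisk_Task) is polled through oslo_vmware.api until it reports completion. Below is a minimal sketch of that invoke-and-poll flow, assuming an already-established oslo_vmware.api.VMwareAPISession (its invoke_api and wait_for_task methods, as referenced in the oslo_vmware paths logged above) and oslo.concurrency's lockutils; the helper names power_off_vm and copy_cached_image are illustrative, not Nova's own code.

from oslo_concurrency import lockutils


def power_off_vm(session, vm_ref):
    # Start PowerOffVM_Task on the VM and block until vCenter finishes it.
    # wait_for_task is what produces the "progress is N%" poll lines above.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    return session.wait_for_task(task)


def copy_cached_image(session, dc_ref, cache_vmdk_path, instance_vmdk_path):
    # Serialize on the cached image path (the "[datastore1] devstack-image-cache_base/..."
    # lock names above) so concurrent spawns from the same image do not race,
    # then copy the cached vmdk to the instance directory and wait for the task.
    with lockutils.lock(cache_vmdk_path):
        disk_mgr = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                                  sourceName=cache_vmdk_path,
                                  sourceDatacenter=dc_ref,
                                  destName=instance_vmdk_path)
        return session.wait_for_task(task)

wait_for_task raises if vCenter marks the task as failed, so callers only proceed past it on success, which is why the tasks above appear either as "progress is N%" polls (api.py:434) or as "completed successfully" (api.py:444).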
[ 1419.118614] env[63379]: INFO nova.compute.rpcapi [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Automatically selected compute RPC version 6.3 from minimum service version 67 [ 1419.118614] env[63379]: DEBUG oslo_concurrency.lockutils [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Releasing lock "compute-rpcapi-router" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1419.144139] env[63379]: DEBUG oslo_concurrency.lockutils [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Releasing lock "refresh_cache-de671ba9-0d86-4f89-a6bd-ecea9ad0ba85" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1419.144139] env[63379]: DEBUG nova.compute.manager [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Instance network_info: |[{"id": "55f75417-a04f-44de-a21a-20527e069280", "address": "fa:16:3e:e7:e3:19", "network": {"id": "8553636b-4f88-41bf-87fb-69801f1866ea", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-28532258-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0133567c115648aa9f6ee1c5adee833b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4399275-8e92-4448-be9e-d4984e93e89c", "external-id": "nsx-vlan-transportzone-192", "segmentation_id": 192, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55f75417-a0", "ovs_interfaceid": "55f75417-a04f-44de-a21a-20527e069280", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1419.145590] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e7:e3:19', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f4399275-8e92-4448-be9e-d4984e93e89c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '55f75417-a04f-44de-a21a-20527e069280', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1419.159380] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Creating folder: Project (0133567c115648aa9f6ee1c5adee833b). Parent ref: group-v369214. 
{{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1419.161263] env[63379]: DEBUG nova.compute.manager [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1419.165018] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2bf954df-c7fe-4611-a6f4-ab3a2e7033dc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.200370] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Created folder: Project (0133567c115648aa9f6ee1c5adee833b) in parent group-v369214. [ 1419.200370] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Creating folder: Instances. Parent ref: group-v369273. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1419.202358] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0666c127-06aa-4dc9-97e3-83735488911b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.214330] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Created folder: Instances in parent group-v369273. [ 1419.214814] env[63379]: DEBUG oslo.service.loopingcall [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1419.214938] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1419.215086] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-83ba3851-9c35-4cf6-a254-5fb9a364621a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.245077] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1419.245077] env[63379]: value = "task-1779066" [ 1419.245077] env[63379]: _type = "Task" [ 1419.245077] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.254666] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779066, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.288446] env[63379]: DEBUG nova.network.neutron [req-49838013-ac9e-4191-8fa4-9f1e6fbdd9b6 req-65fd5b44-35d5-454a-8c56-1df471404adc service nova] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Updated VIF entry in instance network info cache for port a524aedc-254a-4394-836b-4136823591d8. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1419.289015] env[63379]: DEBUG nova.network.neutron [req-49838013-ac9e-4191-8fa4-9f1e6fbdd9b6 req-65fd5b44-35d5-454a-8c56-1df471404adc service nova] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Updating instance_info_cache with network_info: [{"id": "a524aedc-254a-4394-836b-4136823591d8", "address": "fa:16:3e:63:fe:72", "network": {"id": "55f3848c-4d4f-4c83-a3e6-bc7a6f7af3ce", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.77", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eb95d75934bc4912a35f709406a98a65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa524aedc-25", "ovs_interfaceid": "a524aedc-254a-4394-836b-4136823591d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1419.309488] env[63379]: DEBUG nova.network.neutron [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Successfully updated port: c6139085-d9e8-416c-8a48-9c9e3c07eed1 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1419.335360] env[63379]: DEBUG oslo_vmware.api [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Task: {'id': task-1779063, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.581696} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.335854] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 6b4e80fc-582f-432b-aa99-ec133127578e/6b4e80fc-582f-432b-aa99-ec133127578e.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1419.336267] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1419.336659] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f2cb61c3-18c2-43b9-96d4-dc3d70494689 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.349537] env[63379]: DEBUG oslo_vmware.api [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Waiting for the task: (returnval){ [ 1419.349537] env[63379]: value = "task-1779067" [ 1419.349537] env[63379]: _type = "Task" [ 1419.349537] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.360867] env[63379]: DEBUG oslo_vmware.api [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Task: {'id': task-1779067, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.412623] env[63379]: DEBUG oslo_concurrency.lockutils [req-7a1fc827-2b96-4105-ae20-cbea9adfed1e req-034f9140-f479-4f32-823d-977d0c4e42f9 service nova] Releasing lock "refresh_cache-15d19ce3-ea71-47ff-a738-9ba00b8dfcf1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1419.425224] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e19ea006-6e94-42e0-9238-71b6ae829c2e tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1419.662569] env[63379]: DEBUG oslo_concurrency.lockutils [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Acquiring lock "refresh_cache-aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1419.662994] env[63379]: DEBUG oslo_concurrency.lockutils [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Acquired lock "refresh_cache-aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1419.663557] env[63379]: DEBUG nova.network.neutron [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1419.690899] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1419.762360] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779066, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.797209] env[63379]: DEBUG oslo_concurrency.lockutils [req-49838013-ac9e-4191-8fa4-9f1e6fbdd9b6 req-65fd5b44-35d5-454a-8c56-1df471404adc service nova] Releasing lock "refresh_cache-6b4e80fc-582f-432b-aa99-ec133127578e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1419.812161] env[63379]: DEBUG oslo_concurrency.lockutils [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Acquiring lock "refresh_cache-c999d64e-3f5b-4854-8b92-6d0d17f49dd7" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1419.814400] env[63379]: DEBUG oslo_concurrency.lockutils [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Acquired lock "refresh_cache-c999d64e-3f5b-4854-8b92-6d0d17f49dd7" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1419.814400] env[63379]: DEBUG nova.network.neutron [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1419.862522] env[63379]: DEBUG oslo_vmware.api [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Task: {'id': task-1779067, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.0831} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.862995] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1419.864086] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ba0a815-9a7a-4fe6-8c43-b9f359765747 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.898232] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Reconfiguring VM instance instance-00000013 to attach disk [datastore1] 6b4e80fc-582f-432b-aa99-ec133127578e/6b4e80fc-582f-432b-aa99-ec133127578e.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1419.900329] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cebcec45-d329-4c58-9f33-bcfa1f7fbc65 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.928464] env[63379]: DEBUG oslo_vmware.api [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Waiting for the task: (returnval){ [ 1419.928464] env[63379]: value = "task-1779068" [ 1419.928464] env[63379]: _type = "Task" [ 1419.928464] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.939659] env[63379]: DEBUG oslo_vmware.api [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Task: {'id': task-1779068, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.214859] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-076e41e3-9db7-4b59-ad3b-c9c4aa023512 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.230341] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2309a86d-70b7-4721-9b4f-e9086870d9e9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.271575] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e75e16b-7848-44d8-9183-742c5dbe895e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.287817] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e097a38-7272-476a-b0d3-0a17f1a81cb4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.292108] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779066, 'name': CreateVM_Task, 'duration_secs': 0.665181} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.292315] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1420.294505] env[63379]: DEBUG oslo_concurrency.lockutils [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1420.294505] env[63379]: DEBUG oslo_concurrency.lockutils [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1420.294505] env[63379]: DEBUG oslo_concurrency.lockutils [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1420.294700] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-29b46867-5daa-40e8-b17a-889d088c86b9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.305995] env[63379]: DEBUG nova.compute.provider_tree [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1420.309724] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d79c68bb-b07e-42ab-b6fd-1990e43fd95d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquiring lock "d221329b-eee4-42f5-bb27-cf6af0386c04" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1420.309962] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d79c68bb-b07e-42ab-b6fd-1990e43fd95d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Lock "d221329b-eee4-42f5-bb27-cf6af0386c04" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1420.310190] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d79c68bb-b07e-42ab-b6fd-1990e43fd95d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquiring lock "d221329b-eee4-42f5-bb27-cf6af0386c04-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1420.310381] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d79c68bb-b07e-42ab-b6fd-1990e43fd95d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Lock "d221329b-eee4-42f5-bb27-cf6af0386c04-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1420.310677] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d79c68bb-b07e-42ab-b6fd-1990e43fd95d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Lock "d221329b-eee4-42f5-bb27-cf6af0386c04-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1420.312720] env[63379]: INFO nova.compute.manager [None req-d79c68bb-b07e-42ab-b6fd-1990e43fd95d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Terminating instance [ 1420.315681] env[63379]: DEBUG oslo_vmware.api [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Waiting for the task: (returnval){ [ 1420.315681] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52fe252b-7423-38ea-d6a8-0229a173c092" [ 1420.315681] env[63379]: _type = "Task" [ 1420.315681] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.319532] env[63379]: DEBUG nova.compute.manager [None req-d79c68bb-b07e-42ab-b6fd-1990e43fd95d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1420.319775] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-d79c68bb-b07e-42ab-b6fd-1990e43fd95d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1420.323806] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e4bc935-98a4-463c-84a0-0e407cf67da4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.335958] env[63379]: DEBUG oslo_vmware.api [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52fe252b-7423-38ea-d6a8-0229a173c092, 'name': SearchDatastore_Task, 'duration_secs': 0.011639} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.338703] env[63379]: DEBUG oslo_concurrency.lockutils [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1420.339098] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1420.339893] env[63379]: DEBUG oslo_concurrency.lockutils [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1420.339893] env[63379]: DEBUG oslo_concurrency.lockutils [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1420.339893] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1420.340077] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-d79c68bb-b07e-42ab-b6fd-1990e43fd95d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Powering off the VM 
{{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1420.340293] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ffb17407-c75d-46d0-8c2e-ba7ac55bff87 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.342129] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bef7448c-defe-4989-bbeb-98dd942615bb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.351925] env[63379]: DEBUG oslo_vmware.api [None req-d79c68bb-b07e-42ab-b6fd-1990e43fd95d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for the task: (returnval){ [ 1420.351925] env[63379]: value = "task-1779069" [ 1420.351925] env[63379]: _type = "Task" [ 1420.351925] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.353145] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1420.353380] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1420.357115] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e82c6f22-cd59-4df3-b438-e5a621976ef4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.366438] env[63379]: DEBUG oslo_vmware.api [None req-d79c68bb-b07e-42ab-b6fd-1990e43fd95d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779069, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.367552] env[63379]: DEBUG oslo_vmware.api [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Waiting for the task: (returnval){ [ 1420.367552] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5296c7ec-669d-a159-95ed-76fdf53e08ab" [ 1420.367552] env[63379]: _type = "Task" [ 1420.367552] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.378905] env[63379]: DEBUG oslo_vmware.api [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5296c7ec-669d-a159-95ed-76fdf53e08ab, 'name': SearchDatastore_Task, 'duration_secs': 0.011098} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.380167] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-113783ae-4108-428c-b189-542de0b3673b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.389819] env[63379]: DEBUG oslo_vmware.api [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Waiting for the task: (returnval){ [ 1420.389819] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e86e4b-42fe-83a8-3efa-a5f0424099f4" [ 1420.389819] env[63379]: _type = "Task" [ 1420.389819] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.391481] env[63379]: DEBUG nova.network.neutron [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1420.403021] env[63379]: DEBUG oslo_vmware.api [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e86e4b-42fe-83a8-3efa-a5f0424099f4, 'name': SearchDatastore_Task, 'duration_secs': 0.009725} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.403021] env[63379]: DEBUG oslo_concurrency.lockutils [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1420.403021] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] de671ba9-0d86-4f89-a6bd-ecea9ad0ba85/de671ba9-0d86-4f89-a6bd-ecea9ad0ba85.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1420.403021] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0b8d803e-c77d-47e9-b3e7-8de5e4484259 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.410040] env[63379]: DEBUG oslo_vmware.api [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Waiting for the task: (returnval){ [ 1420.410040] env[63379]: value = "task-1779070" [ 1420.410040] env[63379]: _type = "Task" [ 1420.410040] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.418650] env[63379]: DEBUG oslo_vmware.api [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Task: {'id': task-1779070, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.422237] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7fc23ce3-c8c3-4aa3-a6b1-09b9c2cc579a tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Acquiring lock "aaaf4b06-ef84-41ba-8054-29582854a9f1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1420.422499] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7fc23ce3-c8c3-4aa3-a6b1-09b9c2cc579a tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Lock "aaaf4b06-ef84-41ba-8054-29582854a9f1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1420.422713] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7fc23ce3-c8c3-4aa3-a6b1-09b9c2cc579a tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Acquiring lock "aaaf4b06-ef84-41ba-8054-29582854a9f1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1420.422896] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7fc23ce3-c8c3-4aa3-a6b1-09b9c2cc579a tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Lock "aaaf4b06-ef84-41ba-8054-29582854a9f1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1420.423080] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7fc23ce3-c8c3-4aa3-a6b1-09b9c2cc579a tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Lock "aaaf4b06-ef84-41ba-8054-29582854a9f1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1420.427870] env[63379]: INFO nova.compute.manager [None req-7fc23ce3-c8c3-4aa3-a6b1-09b9c2cc579a tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Terminating instance [ 1420.430085] env[63379]: DEBUG nova.compute.manager [None req-7fc23ce3-c8c3-4aa3-a6b1-09b9c2cc579a tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1420.430288] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-7fc23ce3-c8c3-4aa3-a6b1-09b9c2cc579a tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1420.431622] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1343e3e9-6953-4698-a82c-550d354389a7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.443335] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fc23ce3-c8c3-4aa3-a6b1-09b9c2cc579a tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1420.446367] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6d0e5c59-a4d5-42e3-bb65-cc8bd5599cb5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.451081] env[63379]: DEBUG oslo_vmware.api [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Task: {'id': task-1779068, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.457775] env[63379]: DEBUG oslo_vmware.api [None req-7fc23ce3-c8c3-4aa3-a6b1-09b9c2cc579a tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Waiting for the task: (returnval){ [ 1420.457775] env[63379]: value = "task-1779071" [ 1420.457775] env[63379]: _type = "Task" [ 1420.457775] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.469597] env[63379]: DEBUG oslo_vmware.api [None req-7fc23ce3-c8c3-4aa3-a6b1-09b9c2cc579a tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Task: {'id': task-1779071, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.809375] env[63379]: DEBUG nova.scheduler.client.report [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1420.865570] env[63379]: DEBUG oslo_vmware.api [None req-d79c68bb-b07e-42ab-b6fd-1990e43fd95d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779069, 'name': PowerOffVM_Task, 'duration_secs': 0.23511} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.865570] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-d79c68bb-b07e-42ab-b6fd-1990e43fd95d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1420.865570] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-d79c68bb-b07e-42ab-b6fd-1990e43fd95d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1420.865570] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7ed251e9-9c52-4d82-9ff5-abcddd9866df {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.903654] env[63379]: DEBUG nova.network.neutron [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Updating instance_info_cache with network_info: [{"id": "e034314c-72fb-4187-9c6b-1cd2e95aa97a", "address": "fa:16:3e:d2:92:4e", "network": {"id": "55f3848c-4d4f-4c83-a3e6-bc7a6f7af3ce", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.250", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eb95d75934bc4912a35f709406a98a65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape034314c-72", "ovs_interfaceid": "e034314c-72fb-4187-9c6b-1cd2e95aa97a", "qbh_params": null, "qbg_params": null, 
"active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1420.924646] env[63379]: DEBUG oslo_vmware.api [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Task: {'id': task-1779070, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.936878] env[63379]: DEBUG nova.network.neutron [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Updating instance_info_cache with network_info: [{"id": "c6139085-d9e8-416c-8a48-9c9e3c07eed1", "address": "fa:16:3e:79:38:a5", "network": {"id": "4f28bd62-f71b-4455-b7f0-24f1ffefa11b", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1579310435-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e0420b33d0e4236b9750b79d94e57af", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb68953b-dee5-4d9d-b47b-277336ba76dc", "external-id": "nsx-vlan-transportzone-168", "segmentation_id": 168, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6139085-d9", "ovs_interfaceid": "c6139085-d9e8-416c-8a48-9c9e3c07eed1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1420.944209] env[63379]: DEBUG oslo_vmware.api [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Task: {'id': task-1779068, 'name': ReconfigVM_Task, 'duration_secs': 0.579312} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.944670] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Reconfigured VM instance instance-00000013 to attach disk [datastore1] 6b4e80fc-582f-432b-aa99-ec133127578e/6b4e80fc-582f-432b-aa99-ec133127578e.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1420.945439] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-344ba551-281e-48b0-9251-ce06bf4431c4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.956750] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-d79c68bb-b07e-42ab-b6fd-1990e43fd95d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1420.956874] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-d79c68bb-b07e-42ab-b6fd-1990e43fd95d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1420.957216] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-d79c68bb-b07e-42ab-b6fd-1990e43fd95d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Deleting the datastore file [datastore1] d221329b-eee4-42f5-bb27-cf6af0386c04 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1420.958120] env[63379]: DEBUG oslo_vmware.api [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Waiting for the task: (returnval){ [ 1420.958120] env[63379]: value = "task-1779073" [ 1420.958120] env[63379]: _type = "Task" [ 1420.958120] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.958547] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-31eaf8d5-a856-4c3c-a452-6e990336c02a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.974569] env[63379]: DEBUG oslo_vmware.api [None req-7fc23ce3-c8c3-4aa3-a6b1-09b9c2cc579a tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Task: {'id': task-1779071, 'name': PowerOffVM_Task, 'duration_secs': 0.477002} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.981161] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fc23ce3-c8c3-4aa3-a6b1-09b9c2cc579a tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1420.981161] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-7fc23ce3-c8c3-4aa3-a6b1-09b9c2cc579a tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1420.981855] env[63379]: DEBUG oslo_vmware.api [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Task: {'id': task-1779073, 'name': Rename_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.982617] env[63379]: DEBUG oslo_vmware.api [None req-d79c68bb-b07e-42ab-b6fd-1990e43fd95d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for the task: (returnval){ [ 1420.982617] env[63379]: value = "task-1779074" [ 1420.982617] env[63379]: _type = "Task" [ 1420.982617] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.982617] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-946fb475-0af3-49a2-b4a9-776d2f8edd98 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.996499] env[63379]: DEBUG oslo_vmware.api [None req-d79c68bb-b07e-42ab-b6fd-1990e43fd95d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779074, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.072684] env[63379]: DEBUG nova.compute.manager [req-32b82609-143b-42c0-b1e1-4d2eff041b22 req-ad4bb714-d404-4336-b546-8f8d81973968 service nova] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Received event network-vif-deleted-129136be-c7bf-454e-a408-37372aa8bfd9 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1421.075545] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-7fc23ce3-c8c3-4aa3-a6b1-09b9c2cc579a tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1421.076142] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-7fc23ce3-c8c3-4aa3-a6b1-09b9c2cc579a tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1421.076593] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fc23ce3-c8c3-4aa3-a6b1-09b9c2cc579a tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Deleting the datastore file [datastore1] aaaf4b06-ef84-41ba-8054-29582854a9f1 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1421.077430] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1361b72a-d6d1-4054-b399-01648369db75 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.089510] env[63379]: DEBUG oslo_vmware.api [None req-7fc23ce3-c8c3-4aa3-a6b1-09b9c2cc579a tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Waiting for the task: (returnval){ [ 1421.089510] env[63379]: value = "task-1779076" [ 1421.089510] env[63379]: _type = "Task" [ 1421.089510] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.107758] env[63379]: DEBUG oslo_vmware.api [None req-7fc23ce3-c8c3-4aa3-a6b1-09b9c2cc579a tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Task: {'id': task-1779076, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.281541] env[63379]: DEBUG nova.compute.manager [req-60e710a9-a8dc-420f-924a-225b239ef10a req-009d3f6a-5e64-4966-9474-ce4b722302a0 service nova] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Received event network-vif-plugged-55f75417-a04f-44de-a21a-20527e069280 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1421.281885] env[63379]: DEBUG oslo_concurrency.lockutils [req-60e710a9-a8dc-420f-924a-225b239ef10a req-009d3f6a-5e64-4966-9474-ce4b722302a0 service nova] Acquiring lock "de671ba9-0d86-4f89-a6bd-ecea9ad0ba85-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1421.282131] env[63379]: DEBUG oslo_concurrency.lockutils [req-60e710a9-a8dc-420f-924a-225b239ef10a req-009d3f6a-5e64-4966-9474-ce4b722302a0 service nova] Lock "de671ba9-0d86-4f89-a6bd-ecea9ad0ba85-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1421.282313] env[63379]: DEBUG oslo_concurrency.lockutils [req-60e710a9-a8dc-420f-924a-225b239ef10a req-009d3f6a-5e64-4966-9474-ce4b722302a0 service nova] Lock "de671ba9-0d86-4f89-a6bd-ecea9ad0ba85-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1421.282489] env[63379]: DEBUG nova.compute.manager [req-60e710a9-a8dc-420f-924a-225b239ef10a req-009d3f6a-5e64-4966-9474-ce4b722302a0 service nova] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] No waiting events found dispatching network-vif-plugged-55f75417-a04f-44de-a21a-20527e069280 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1421.282774] env[63379]: WARNING nova.compute.manager [req-60e710a9-a8dc-420f-924a-225b239ef10a req-009d3f6a-5e64-4966-9474-ce4b722302a0 service nova] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Received unexpected event network-vif-plugged-55f75417-a04f-44de-a21a-20527e069280 for instance with vm_state building and task_state spawning. [ 1421.282858] env[63379]: DEBUG nova.compute.manager [req-60e710a9-a8dc-420f-924a-225b239ef10a req-009d3f6a-5e64-4966-9474-ce4b722302a0 service nova] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Received event network-changed-55f75417-a04f-44de-a21a-20527e069280 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1421.283333] env[63379]: DEBUG nova.compute.manager [req-60e710a9-a8dc-420f-924a-225b239ef10a req-009d3f6a-5e64-4966-9474-ce4b722302a0 service nova] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Refreshing instance network info cache due to event network-changed-55f75417-a04f-44de-a21a-20527e069280. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1421.283850] env[63379]: DEBUG oslo_concurrency.lockutils [req-60e710a9-a8dc-420f-924a-225b239ef10a req-009d3f6a-5e64-4966-9474-ce4b722302a0 service nova] Acquiring lock "refresh_cache-de671ba9-0d86-4f89-a6bd-ecea9ad0ba85" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1421.283955] env[63379]: DEBUG oslo_concurrency.lockutils [req-60e710a9-a8dc-420f-924a-225b239ef10a req-009d3f6a-5e64-4966-9474-ce4b722302a0 service nova] Acquired lock "refresh_cache-de671ba9-0d86-4f89-a6bd-ecea9ad0ba85" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1421.284347] env[63379]: DEBUG nova.network.neutron [req-60e710a9-a8dc-420f-924a-225b239ef10a req-009d3f6a-5e64-4966-9474-ce4b722302a0 service nova] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Refreshing network info cache for port 55f75417-a04f-44de-a21a-20527e069280 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1421.314553] env[63379]: DEBUG oslo_concurrency.lockutils [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.705s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1421.315162] env[63379]: DEBUG nova.compute.manager [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1421.318505] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.750s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1421.319807] env[63379]: INFO nova.compute.claims [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1421.411065] env[63379]: DEBUG oslo_concurrency.lockutils [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Releasing lock "refresh_cache-aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1421.427085] env[63379]: DEBUG oslo_vmware.api [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Task: {'id': task-1779070, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.440319] env[63379]: DEBUG oslo_concurrency.lockutils [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Releasing lock "refresh_cache-c999d64e-3f5b-4854-8b92-6d0d17f49dd7" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1421.440657] env[63379]: DEBUG nova.compute.manager [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Instance network_info: |[{"id": "c6139085-d9e8-416c-8a48-9c9e3c07eed1", "address": "fa:16:3e:79:38:a5", "network": {"id": "4f28bd62-f71b-4455-b7f0-24f1ffefa11b", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1579310435-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e0420b33d0e4236b9750b79d94e57af", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb68953b-dee5-4d9d-b47b-277336ba76dc", "external-id": "nsx-vlan-transportzone-168", "segmentation_id": 168, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6139085-d9", "ovs_interfaceid": "c6139085-d9e8-416c-8a48-9c9e3c07eed1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1421.441391] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:79:38:a5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fb68953b-dee5-4d9d-b47b-277336ba76dc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c6139085-d9e8-416c-8a48-9c9e3c07eed1', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1421.453520] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Creating folder: Project (6e0420b33d0e4236b9750b79d94e57af). Parent ref: group-v369214. 
{{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1421.454434] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-deeb1a79-38d9-489d-be74-b3d7c0420013 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.471932] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Created folder: Project (6e0420b33d0e4236b9750b79d94e57af) in parent group-v369214. [ 1421.471932] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Creating folder: Instances. Parent ref: group-v369276. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1421.471932] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-750b3ee6-beae-42fc-a2de-1b5d8caaa856 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.477563] env[63379]: DEBUG oslo_vmware.api [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Task: {'id': task-1779073, 'name': Rename_Task, 'duration_secs': 0.272639} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.478125] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1421.478371] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1becee25-4a8f-4fa6-a882-9d811270e5f6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.486198] env[63379]: DEBUG oslo_vmware.api [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Waiting for the task: (returnval){ [ 1421.486198] env[63379]: value = "task-1779079" [ 1421.486198] env[63379]: _type = "Task" [ 1421.486198] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.496944] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Created folder: Instances in parent group-v369276. [ 1421.497377] env[63379]: DEBUG oslo.service.loopingcall [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1421.498045] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1421.498591] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e60c6a0f-955e-435c-a91f-b8df177681ed {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.521256] env[63379]: DEBUG oslo_vmware.api [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Task: {'id': task-1779079, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.521568] env[63379]: DEBUG oslo_vmware.api [None req-d79c68bb-b07e-42ab-b6fd-1990e43fd95d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779074, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.527051] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1421.527051] env[63379]: value = "task-1779080" [ 1421.527051] env[63379]: _type = "Task" [ 1421.527051] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.536404] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779080, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.600405] env[63379]: DEBUG oslo_vmware.api [None req-7fc23ce3-c8c3-4aa3-a6b1-09b9c2cc579a tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Task: {'id': task-1779076, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.662336] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7f9f28cb-7c5b-4e6d-921c-d0df3998b98d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquiring lock "571bb238-9cf3-475e-b596-a9609acc8696" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1421.662470] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7f9f28cb-7c5b-4e6d-921c-d0df3998b98d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Lock "571bb238-9cf3-475e-b596-a9609acc8696" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1421.662753] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7f9f28cb-7c5b-4e6d-921c-d0df3998b98d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquiring lock "571bb238-9cf3-475e-b596-a9609acc8696-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1421.662991] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7f9f28cb-7c5b-4e6d-921c-d0df3998b98d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Lock "571bb238-9cf3-475e-b596-a9609acc8696-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1421.663248] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7f9f28cb-7c5b-4e6d-921c-d0df3998b98d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Lock "571bb238-9cf3-475e-b596-a9609acc8696-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1421.668219] env[63379]: INFO nova.compute.manager [None req-7f9f28cb-7c5b-4e6d-921c-d0df3998b98d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Terminating instance [ 1421.670727] env[63379]: DEBUG nova.compute.manager [None req-7f9f28cb-7c5b-4e6d-921c-d0df3998b98d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1421.670961] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-7f9f28cb-7c5b-4e6d-921c-d0df3998b98d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1421.672501] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68fcc5de-91f2-4df1-b2ea-e50f6e93ef32 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.688121] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f9f28cb-7c5b-4e6d-921c-d0df3998b98d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1421.688447] env[63379]: DEBUG oslo_concurrency.lockutils [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Acquiring lock "915aec20-5765-4aad-8b0f-f2d71b7d6428" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1421.688702] env[63379]: DEBUG oslo_concurrency.lockutils [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Lock "915aec20-5765-4aad-8b0f-f2d71b7d6428" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1421.689300] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-04978cc0-a93a-461f-b849-05420cc00e45 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.697769] env[63379]: DEBUG oslo_vmware.api [None req-7f9f28cb-7c5b-4e6d-921c-d0df3998b98d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for the task: (returnval){ [ 1421.697769] env[63379]: value = "task-1779084" [ 1421.697769] env[63379]: _type = "Task" [ 1421.697769] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.708217] env[63379]: DEBUG oslo_vmware.api [None req-7f9f28cb-7c5b-4e6d-921c-d0df3998b98d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779084, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.825589] env[63379]: DEBUG nova.compute.utils [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1421.832832] env[63379]: DEBUG nova.compute.manager [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1421.833160] env[63379]: DEBUG nova.network.neutron [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1421.926823] env[63379]: DEBUG oslo_vmware.api [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Task: {'id': task-1779070, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.941709] env[63379]: DEBUG nova.policy [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a0530d2698d245edae9ba088734adf0e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '86e655baa29c4c88b8648d273f92ed4b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1422.005813] env[63379]: DEBUG oslo_vmware.api [None req-d79c68bb-b07e-42ab-b6fd-1990e43fd95d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779074, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.010580] env[63379]: DEBUG oslo_vmware.api [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Task: {'id': task-1779079, 'name': PowerOnVM_Task} progress is 90%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.037940] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779080, 'name': CreateVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.101430] env[63379]: DEBUG oslo_vmware.api [None req-7fc23ce3-c8c3-4aa3-a6b1-09b9c2cc579a tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Task: {'id': task-1779076, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.213905] env[63379]: DEBUG oslo_vmware.api [None req-7f9f28cb-7c5b-4e6d-921c-d0df3998b98d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779084, 'name': PowerOffVM_Task, 'duration_secs': 0.183014} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.215407] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f9f28cb-7c5b-4e6d-921c-d0df3998b98d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1422.215407] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-7f9f28cb-7c5b-4e6d-921c-d0df3998b98d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1422.215407] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b1afdfcd-ed71-4d1e-970a-d200e2a23bbc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.305665] env[63379]: DEBUG nova.network.neutron [req-60e710a9-a8dc-420f-924a-225b239ef10a req-009d3f6a-5e64-4966-9474-ce4b722302a0 service nova] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Updated VIF entry in instance network info cache for port 55f75417-a04f-44de-a21a-20527e069280. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1422.306253] env[63379]: DEBUG nova.network.neutron [req-60e710a9-a8dc-420f-924a-225b239ef10a req-009d3f6a-5e64-4966-9474-ce4b722302a0 service nova] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Updating instance_info_cache with network_info: [{"id": "55f75417-a04f-44de-a21a-20527e069280", "address": "fa:16:3e:e7:e3:19", "network": {"id": "8553636b-4f88-41bf-87fb-69801f1866ea", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-28532258-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0133567c115648aa9f6ee1c5adee833b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4399275-8e92-4448-be9e-d4984e93e89c", "external-id": "nsx-vlan-transportzone-192", "segmentation_id": 192, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55f75417-a0", "ovs_interfaceid": "55f75417-a04f-44de-a21a-20527e069280", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1422.339092] env[63379]: DEBUG nova.compute.manager [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1422.427703] env[63379]: DEBUG oslo_vmware.api [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Task: {'id': task-1779070, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.508166] env[63379]: DEBUG oslo_vmware.api [None req-d79c68bb-b07e-42ab-b6fd-1990e43fd95d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779074, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.512177] env[63379]: DEBUG oslo_vmware.api [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Task: {'id': task-1779079, 'name': PowerOnVM_Task, 'duration_secs': 0.558736} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.520209] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1422.520209] env[63379]: INFO nova.compute.manager [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Took 10.85 seconds to spawn the instance on the hypervisor. [ 1422.520209] env[63379]: DEBUG nova.compute.manager [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1422.521035] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ba9c3e3-3798-48a6-9cc4-9b94e2c1baf9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.552697] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779080, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.595922] env[63379]: DEBUG nova.network.neutron [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Successfully created port: 89d7e5cf-c802-47c1-97bd-981796ed50c7 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1422.602494] env[63379]: DEBUG oslo_vmware.api [None req-7fc23ce3-c8c3-4aa3-a6b1-09b9c2cc579a tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Task: {'id': task-1779076, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.809730] env[63379]: DEBUG oslo_concurrency.lockutils [req-60e710a9-a8dc-420f-924a-225b239ef10a req-009d3f6a-5e64-4966-9474-ce4b722302a0 service nova] Releasing lock "refresh_cache-de671ba9-0d86-4f89-a6bd-ecea9ad0ba85" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1422.810028] env[63379]: DEBUG nova.compute.manager [req-60e710a9-a8dc-420f-924a-225b239ef10a req-009d3f6a-5e64-4966-9474-ce4b722302a0 service nova] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Received event network-changed-65e3bc3b-bfed-4dd6-be59-87481a211014 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1422.810212] env[63379]: DEBUG nova.compute.manager [req-60e710a9-a8dc-420f-924a-225b239ef10a req-009d3f6a-5e64-4966-9474-ce4b722302a0 service nova] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Refreshing instance network info cache due to event network-changed-65e3bc3b-bfed-4dd6-be59-87481a211014. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1422.810420] env[63379]: DEBUG oslo_concurrency.lockutils [req-60e710a9-a8dc-420f-924a-225b239ef10a req-009d3f6a-5e64-4966-9474-ce4b722302a0 service nova] Acquiring lock "refresh_cache-25090d85-cd10-44fc-aa9d-071ada14f249" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1422.810560] env[63379]: DEBUG oslo_concurrency.lockutils [req-60e710a9-a8dc-420f-924a-225b239ef10a req-009d3f6a-5e64-4966-9474-ce4b722302a0 service nova] Acquired lock "refresh_cache-25090d85-cd10-44fc-aa9d-071ada14f249" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1422.810723] env[63379]: DEBUG nova.network.neutron [req-60e710a9-a8dc-420f-924a-225b239ef10a req-009d3f6a-5e64-4966-9474-ce4b722302a0 service nova] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Refreshing network info cache for port 65e3bc3b-bfed-4dd6-be59-87481a211014 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1422.916498] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61dc91af-6d2f-4a46-b2d0-dd1225f9db4c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.931249] env[63379]: DEBUG oslo_vmware.api [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Task: {'id': task-1779070, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.156138} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.931759] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] de671ba9-0d86-4f89-a6bd-ecea9ad0ba85/de671ba9-0d86-4f89-a6bd-ecea9ad0ba85.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1422.931875] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1422.932757] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70cc9f4a-00c4-4fe2-b60d-f0a234f49064 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.935804] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-42a78b91-628e-4b13-9ea5-d8bf309eb70a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.940809] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75bb2f40-fb3d-4045-a5db-ecd347e6ec46 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.960537] 
env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Updating instance 'aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae' progress to 0 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1422.993129] env[63379]: DEBUG oslo_vmware.api [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Waiting for the task: (returnval){ [ 1422.993129] env[63379]: value = "task-1779086" [ 1422.993129] env[63379]: _type = "Task" [ 1422.993129] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.993987] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-541409ba-c296-4219-af8d-b034b1187829 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.009728] env[63379]: DEBUG oslo_vmware.api [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Task: {'id': task-1779086, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.014701] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa6b0b32-a984-417d-bf3d-c2180b50e9a3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.019563] env[63379]: DEBUG oslo_vmware.api [None req-d79c68bb-b07e-42ab-b6fd-1990e43fd95d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779074, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.686564} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.020549] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-d79c68bb-b07e-42ab-b6fd-1990e43fd95d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1423.020758] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-d79c68bb-b07e-42ab-b6fd-1990e43fd95d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1423.021824] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-d79c68bb-b07e-42ab-b6fd-1990e43fd95d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1423.021824] env[63379]: INFO nova.compute.manager [None req-d79c68bb-b07e-42ab-b6fd-1990e43fd95d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Took 2.70 seconds to destroy the instance on the hypervisor. [ 1423.021824] env[63379]: DEBUG oslo.service.loopingcall [None req-d79c68bb-b07e-42ab-b6fd-1990e43fd95d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1423.022379] env[63379]: DEBUG nova.compute.manager [-] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1423.022501] env[63379]: DEBUG nova.network.neutron [-] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1423.032565] env[63379]: DEBUG nova.compute.provider_tree [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1423.046021] env[63379]: INFO nova.compute.manager [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Took 27.44 seconds to build instance. [ 1423.051945] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779080, 'name': CreateVM_Task, 'duration_secs': 1.383658} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.052305] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1423.052940] env[63379]: DEBUG oslo_concurrency.lockutils [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1423.053118] env[63379]: DEBUG oslo_concurrency.lockutils [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1423.053438] env[63379]: DEBUG oslo_concurrency.lockutils [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1423.053686] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-661fdb5d-8b92-4cce-8b00-80f6ef341825 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.060374] env[63379]: DEBUG oslo_vmware.api [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Waiting for the task: (returnval){ [ 1423.060374] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]524ae7fb-e2e2-7824-7a01-856a5ac31a94" [ 1423.060374] env[63379]: _type = "Task" [ 1423.060374] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.069729] env[63379]: DEBUG oslo_vmware.api [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]524ae7fb-e2e2-7824-7a01-856a5ac31a94, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.100306] env[63379]: DEBUG oslo_vmware.api [None req-7fc23ce3-c8c3-4aa3-a6b1-09b9c2cc579a tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Task: {'id': task-1779076, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.583325} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.100593] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fc23ce3-c8c3-4aa3-a6b1-09b9c2cc579a tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1423.100780] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-7fc23ce3-c8c3-4aa3-a6b1-09b9c2cc579a tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1423.100954] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-7fc23ce3-c8c3-4aa3-a6b1-09b9c2cc579a tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1423.101145] env[63379]: INFO nova.compute.manager [None req-7fc23ce3-c8c3-4aa3-a6b1-09b9c2cc579a tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Took 2.67 seconds to destroy the instance on the hypervisor. [ 1423.101381] env[63379]: DEBUG oslo.service.loopingcall [None req-7fc23ce3-c8c3-4aa3-a6b1-09b9c2cc579a tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1423.101562] env[63379]: DEBUG nova.compute.manager [-] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1423.101655] env[63379]: DEBUG nova.network.neutron [-] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1423.350201] env[63379]: DEBUG nova.compute.manager [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1423.384646] env[63379]: DEBUG nova.virt.hardware [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1423.384798] env[63379]: DEBUG nova.virt.hardware [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1423.384951] env[63379]: DEBUG nova.virt.hardware [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1423.385156] env[63379]: DEBUG nova.virt.hardware [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1423.385347] env[63379]: DEBUG nova.virt.hardware [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1423.385528] env[63379]: DEBUG nova.virt.hardware [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1423.385663] env[63379]: DEBUG nova.virt.hardware [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1423.385834] env[63379]: DEBUG nova.virt.hardware [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1423.386010] 
env[63379]: DEBUG nova.virt.hardware [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1423.386183] env[63379]: DEBUG nova.virt.hardware [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1423.386357] env[63379]: DEBUG nova.virt.hardware [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1423.387242] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0015820-0c55-4141-bc1c-43b92ce22798 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.396068] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74033063-6ccc-4c67-bb71-4dc1174a107d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.469020] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1423.469020] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-177e1d30-4cb7-4b76-9e50-a9e7640a1c99 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.481292] env[63379]: DEBUG oslo_vmware.api [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Waiting for the task: (returnval){ [ 1423.481292] env[63379]: value = "task-1779087" [ 1423.481292] env[63379]: _type = "Task" [ 1423.481292] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.500315] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "90f0c97d-695b-4975-8ab9-4e77a9175da1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1423.500383] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "90f0c97d-695b-4975-8ab9-4e77a9175da1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1423.500870] env[63379]: DEBUG oslo_vmware.api [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779087, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.509069] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Acquiring lock "04234ba7-24a3-48e5-9f62-6f4dddd0054a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1423.509310] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Lock "04234ba7-24a3-48e5-9f62-6f4dddd0054a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1423.538234] env[63379]: DEBUG oslo_vmware.api [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Task: {'id': task-1779086, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071255} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.538234] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1423.538234] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1ac71c2-05c8-45fe-8e60-fc41aa7c52d3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.538234] env[63379]: DEBUG nova.scheduler.client.report [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1423.553216] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Reconfiguring VM instance instance-00000014 to attach disk [datastore1] de671ba9-0d86-4f89-a6bd-ecea9ad0ba85/de671ba9-0d86-4f89-a6bd-ecea9ad0ba85.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1423.553216] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1f7113c1-a230-4381-92c5-34957fa422ef {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.566483] env[63379]: DEBUG oslo_concurrency.lockutils [None req-497e549c-368e-4d15-9e22-cb399fc74709 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Lock "6b4e80fc-582f-432b-aa99-ec133127578e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.688s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1423.581703] env[63379]: DEBUG oslo_vmware.api [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]524ae7fb-e2e2-7824-7a01-856a5ac31a94, 'name': SearchDatastore_Task, 'duration_secs': 0.009698} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.583871] env[63379]: DEBUG oslo_concurrency.lockutils [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1423.583871] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1423.584093] env[63379]: DEBUG oslo_concurrency.lockutils [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1423.584753] env[63379]: DEBUG oslo_concurrency.lockutils [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1423.584753] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1423.588156] env[63379]: DEBUG oslo_vmware.api [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Waiting for the task: (returnval){ [ 1423.588156] env[63379]: value = "task-1779088" [ 1423.588156] env[63379]: _type = "Task" [ 1423.588156] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.588156] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-135b0922-7587-4c7c-b2f9-4e3434835b1a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.606402] env[63379]: DEBUG oslo_vmware.api [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Task: {'id': task-1779088, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.606402] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1423.606402] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1423.606869] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf56b962-ac7a-4366-a951-17d1a740fbf8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.613833] env[63379]: DEBUG oslo_vmware.api [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Waiting for the task: (returnval){ [ 1423.613833] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5283869d-2f0c-5cc7-f2a5-5b5a0ce5aae3" [ 1423.613833] env[63379]: _type = "Task" [ 1423.613833] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.623920] env[63379]: DEBUG oslo_vmware.api [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5283869d-2f0c-5cc7-f2a5-5b5a0ce5aae3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.646715] env[63379]: DEBUG nova.network.neutron [req-60e710a9-a8dc-420f-924a-225b239ef10a req-009d3f6a-5e64-4966-9474-ce4b722302a0 service nova] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Updated VIF entry in instance network info cache for port 65e3bc3b-bfed-4dd6-be59-87481a211014. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1423.647255] env[63379]: DEBUG nova.network.neutron [req-60e710a9-a8dc-420f-924a-225b239ef10a req-009d3f6a-5e64-4966-9474-ce4b722302a0 service nova] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Updating instance_info_cache with network_info: [{"id": "65e3bc3b-bfed-4dd6-be59-87481a211014", "address": "fa:16:3e:cf:dd:6f", "network": {"id": "66f99999-2093-485c-98b2-12d4a173be2a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1040360959-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e8ea1d9b2b194236ac9e91082b291b97", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1d25020-c621-4388-ac1d-de55bfefbe50", "external-id": "nsx-vlan-transportzone-573", "segmentation_id": 573, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap65e3bc3b-bf", "ovs_interfaceid": "65e3bc3b-bfed-4dd6-be59-87481a211014", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1423.971712] env[63379]: DEBUG nova.network.neutron [-] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1423.993881] env[63379]: DEBUG oslo_vmware.api [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779087, 'name': PowerOffVM_Task, 'duration_secs': 0.224918} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.994265] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1423.994532] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Updating instance 'aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae' progress to 17 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1424.010827] env[63379]: DEBUG nova.network.neutron [-] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1424.052089] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.734s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1424.052662] env[63379]: DEBUG nova.compute.manager [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1424.055322] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.829s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1424.056969] env[63379]: INFO nova.compute.claims [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1424.064032] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-7f9f28cb-7c5b-4e6d-921c-d0df3998b98d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1424.064303] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-7f9f28cb-7c5b-4e6d-921c-d0df3998b98d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1424.064550] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f9f28cb-7c5b-4e6d-921c-d0df3998b98d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Deleting the datastore file [datastore1] 571bb238-9cf3-475e-b596-a9609acc8696 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1424.065823] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-112fc1b3-1fa4-4c4f-a160-31030023341a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.068796] env[63379]: DEBUG nova.compute.manager [req-7189c4b1-1764-4184-b3b2-901373b06ce7 req-eb7166c3-1878-4437-b3ff-d5c52fe1dd5a service nova] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Received event network-vif-deleted-72ec59a1-3694-48aa-884a-9e0b1cebb603 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1424.074198] env[63379]: DEBUG nova.compute.manager [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1424.078474] env[63379]: DEBUG oslo_vmware.api [None req-7f9f28cb-7c5b-4e6d-921c-d0df3998b98d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for the task: (returnval){ [ 1424.078474] env[63379]: value = "task-1779089" [ 1424.078474] env[63379]: _type = "Task" [ 1424.078474] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.088953] env[63379]: DEBUG oslo_vmware.api [None req-7f9f28cb-7c5b-4e6d-921c-d0df3998b98d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779089, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.100518] env[63379]: DEBUG oslo_vmware.api [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Task: {'id': task-1779088, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.125351] env[63379]: DEBUG oslo_vmware.api [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5283869d-2f0c-5cc7-f2a5-5b5a0ce5aae3, 'name': SearchDatastore_Task, 'duration_secs': 0.011705} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1424.131179] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d931e25b-7175-43a3-b7f1-24c58983fde2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.139567] env[63379]: DEBUG oslo_vmware.api [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Waiting for the task: (returnval){ [ 1424.139567] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f9a258-78eb-2c56-ae11-59bc2741409f" [ 1424.139567] env[63379]: _type = "Task" [ 1424.139567] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.149431] env[63379]: DEBUG oslo_vmware.api [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f9a258-78eb-2c56-ae11-59bc2741409f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.151197] env[63379]: DEBUG oslo_concurrency.lockutils [req-60e710a9-a8dc-420f-924a-225b239ef10a req-009d3f6a-5e64-4966-9474-ce4b722302a0 service nova] Releasing lock "refresh_cache-25090d85-cd10-44fc-aa9d-071ada14f249" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1424.151524] env[63379]: DEBUG nova.compute.manager [req-60e710a9-a8dc-420f-924a-225b239ef10a req-009d3f6a-5e64-4966-9474-ce4b722302a0 service nova] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Received event network-vif-plugged-c6139085-d9e8-416c-8a48-9c9e3c07eed1 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1424.151993] env[63379]: DEBUG oslo_concurrency.lockutils [req-60e710a9-a8dc-420f-924a-225b239ef10a req-009d3f6a-5e64-4966-9474-ce4b722302a0 service nova] Acquiring lock "c999d64e-3f5b-4854-8b92-6d0d17f49dd7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1424.151993] env[63379]: DEBUG oslo_concurrency.lockutils [req-60e710a9-a8dc-420f-924a-225b239ef10a req-009d3f6a-5e64-4966-9474-ce4b722302a0 service nova] Lock "c999d64e-3f5b-4854-8b92-6d0d17f49dd7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1424.152358] env[63379]: DEBUG oslo_concurrency.lockutils [req-60e710a9-a8dc-420f-924a-225b239ef10a req-009d3f6a-5e64-4966-9474-ce4b722302a0 service nova] Lock "c999d64e-3f5b-4854-8b92-6d0d17f49dd7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1424.152419] env[63379]: DEBUG nova.compute.manager [req-60e710a9-a8dc-420f-924a-225b239ef10a req-009d3f6a-5e64-4966-9474-ce4b722302a0 service nova] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] No waiting events found dispatching network-vif-plugged-c6139085-d9e8-416c-8a48-9c9e3c07eed1 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1424.153482] env[63379]: WARNING nova.compute.manager [req-60e710a9-a8dc-420f-924a-225b239ef10a req-009d3f6a-5e64-4966-9474-ce4b722302a0 service nova] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Received unexpected event network-vif-plugged-c6139085-d9e8-416c-8a48-9c9e3c07eed1 for instance with vm_state building and task_state spawning. [ 1424.153482] env[63379]: DEBUG nova.compute.manager [req-60e710a9-a8dc-420f-924a-225b239ef10a req-009d3f6a-5e64-4966-9474-ce4b722302a0 service nova] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Received event network-changed-c6139085-d9e8-416c-8a48-9c9e3c07eed1 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1424.153482] env[63379]: DEBUG nova.compute.manager [req-60e710a9-a8dc-420f-924a-225b239ef10a req-009d3f6a-5e64-4966-9474-ce4b722302a0 service nova] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Refreshing instance network info cache due to event network-changed-c6139085-d9e8-416c-8a48-9c9e3c07eed1. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1424.153482] env[63379]: DEBUG oslo_concurrency.lockutils [req-60e710a9-a8dc-420f-924a-225b239ef10a req-009d3f6a-5e64-4966-9474-ce4b722302a0 service nova] Acquiring lock "refresh_cache-c999d64e-3f5b-4854-8b92-6d0d17f49dd7" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1424.153482] env[63379]: DEBUG oslo_concurrency.lockutils [req-60e710a9-a8dc-420f-924a-225b239ef10a req-009d3f6a-5e64-4966-9474-ce4b722302a0 service nova] Acquired lock "refresh_cache-c999d64e-3f5b-4854-8b92-6d0d17f49dd7" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1424.153482] env[63379]: DEBUG nova.network.neutron [req-60e710a9-a8dc-420f-924a-225b239ef10a req-009d3f6a-5e64-4966-9474-ce4b722302a0 service nova] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Refreshing network info cache for port c6139085-d9e8-416c-8a48-9c9e3c07eed1 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1424.272205] env[63379]: DEBUG nova.compute.manager [None req-2b6e627e-7c63-476f-b09d-34b0f372cfb9 tempest-ServerExternalEventsTest-341958889 tempest-ServerExternalEventsTest-341958889-project] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Received event network-changed {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1424.272426] env[63379]: DEBUG nova.compute.manager [None req-2b6e627e-7c63-476f-b09d-34b0f372cfb9 tempest-ServerExternalEventsTest-341958889 tempest-ServerExternalEventsTest-341958889-project] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Refreshing instance network info cache due to event network-changed. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1424.272640] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2b6e627e-7c63-476f-b09d-34b0f372cfb9 tempest-ServerExternalEventsTest-341958889 tempest-ServerExternalEventsTest-341958889-project] Acquiring lock "refresh_cache-6b4e80fc-582f-432b-aa99-ec133127578e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1424.272782] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2b6e627e-7c63-476f-b09d-34b0f372cfb9 tempest-ServerExternalEventsTest-341958889 tempest-ServerExternalEventsTest-341958889-project] Acquired lock "refresh_cache-6b4e80fc-582f-432b-aa99-ec133127578e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1424.272938] env[63379]: DEBUG nova.network.neutron [None req-2b6e627e-7c63-476f-b09d-34b0f372cfb9 tempest-ServerExternalEventsTest-341958889 tempest-ServerExternalEventsTest-341958889-project] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1424.457103] env[63379]: DEBUG nova.network.neutron [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Successfully updated port: 89d7e5cf-c802-47c1-97bd-981796ed50c7 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1424.457965] env[63379]: WARNING oslo_messaging._drivers.amqpdriver [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Number of call queues is 11, greater than warning threshold: 10. There could be a leak. Increasing threshold to: 20 [ 1424.475453] env[63379]: INFO nova.compute.manager [-] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Took 1.45 seconds to deallocate network for instance. 
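[annotation] The records above show the compute manager handling external "network-changed-<port-id>" / "network-vif-plugged-<port-id>" events by taking a per-instance named lock ("refresh_cache-<instance-uuid>", e.g. refresh_cache-c999d64e-3f5b-4854-8b92-6d0d17f49dd7 for port c6139085-d9e8-416c-8a48-9c9e3c07eed1) and refreshing the cached network info under it. The snippet below is a minimal sketch of that serialization pattern only, not Nova or oslo.concurrency source: the lock registry, named_lock, fetch_port_info and the cache dict are illustrative assumptions; only the lock-name convention is taken from the log.

    # Illustrative sketch (assumptions noted above), standard library only.
    import threading
    from collections import defaultdict
    from contextlib import contextmanager

    _locks = defaultdict(threading.Lock)   # one process-local lock per lock name
    _nw_info_cache = {}                    # hypothetical per-instance network-info cache

    @contextmanager
    def named_lock(name):
        """Acquire the lock registered under 'name' (simplified stand-in for a named lock)."""
        lock = _locks[name]
        lock.acquire()
        try:
            yield
        finally:
            lock.release()

    def refresh_instance_nw_info(instance_uuid, port_id, fetch_port_info):
        """Serialize cache refreshes for one instance while events arrive concurrently."""
        with named_lock("refresh_cache-%s" % instance_uuid):
            # fetch_port_info stands in for the Neutron lookup that rebuilds the VIF entry.
            _nw_info_cache[instance_uuid] = fetch_port_info(port_id)
            return _nw_info_cache[instance_uuid]

    if __name__ == "__main__":
        # Identifiers taken from the log records above; the returned dict is made up.
        info = refresh_instance_nw_info(
            "c999d64e-3f5b-4854-8b92-6d0d17f49dd7",
            "c6139085-d9e8-416c-8a48-9c9e3c07eed1",
            fetch_port_info=lambda pid: {"id": pid, "active": True},
        )
        print(info)

[end annotation]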
[ 1424.500845] env[63379]: DEBUG nova.virt.hardware [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1424.501129] env[63379]: DEBUG nova.virt.hardware [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1424.501463] env[63379]: DEBUG nova.virt.hardware [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1424.501539] env[63379]: DEBUG nova.virt.hardware [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1424.501626] env[63379]: DEBUG nova.virt.hardware [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1424.501765] env[63379]: DEBUG nova.virt.hardware [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1424.502101] env[63379]: DEBUG nova.virt.hardware [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1424.502613] env[63379]: DEBUG nova.virt.hardware [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1424.502699] env[63379]: DEBUG nova.virt.hardware [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1424.503086] env[63379]: DEBUG 
nova.virt.hardware [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1424.504065] env[63379]: DEBUG nova.virt.hardware [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1424.511371] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-84482005-60a6-4b1a-8923-1d3c8370361e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.526463] env[63379]: INFO nova.compute.manager [-] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Took 1.42 seconds to deallocate network for instance. [ 1424.535894] env[63379]: DEBUG oslo_vmware.api [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Waiting for the task: (returnval){ [ 1424.535894] env[63379]: value = "task-1779090" [ 1424.535894] env[63379]: _type = "Task" [ 1424.535894] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.545880] env[63379]: DEBUG oslo_vmware.api [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779090, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.562877] env[63379]: DEBUG nova.compute.utils [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1424.563814] env[63379]: DEBUG nova.compute.manager [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1424.564023] env[63379]: DEBUG nova.network.neutron [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1424.601170] env[63379]: DEBUG oslo_vmware.api [None req-7f9f28cb-7c5b-4e6d-921c-d0df3998b98d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779089, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160405} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1424.601955] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f9f28cb-7c5b-4e6d-921c-d0df3998b98d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1424.602117] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-7f9f28cb-7c5b-4e6d-921c-d0df3998b98d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1424.602335] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-7f9f28cb-7c5b-4e6d-921c-d0df3998b98d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1424.602523] env[63379]: INFO nova.compute.manager [None req-7f9f28cb-7c5b-4e6d-921c-d0df3998b98d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Took 2.93 seconds to destroy the instance on the hypervisor. [ 1424.602768] env[63379]: DEBUG oslo.service.loopingcall [None req-7f9f28cb-7c5b-4e6d-921c-d0df3998b98d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1424.603283] env[63379]: DEBUG nova.compute.manager [-] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1424.603440] env[63379]: DEBUG nova.network.neutron [-] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1424.608696] env[63379]: DEBUG oslo_vmware.api [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Task: {'id': task-1779088, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.610633] env[63379]: DEBUG oslo_concurrency.lockutils [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1424.635524] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b1269a0f-59e8-4dca-9fde-96f070df9bb8 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Acquiring lock "efc5b3b6-bed4-484c-8a0c-65810747382d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1424.635926] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b1269a0f-59e8-4dca-9fde-96f070df9bb8 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Lock "efc5b3b6-bed4-484c-8a0c-65810747382d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1424.636037] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b1269a0f-59e8-4dca-9fde-96f070df9bb8 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Acquiring lock "efc5b3b6-bed4-484c-8a0c-65810747382d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1424.636694] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b1269a0f-59e8-4dca-9fde-96f070df9bb8 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Lock "efc5b3b6-bed4-484c-8a0c-65810747382d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1424.638348] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b1269a0f-59e8-4dca-9fde-96f070df9bb8 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Lock "efc5b3b6-bed4-484c-8a0c-65810747382d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1424.646199] env[63379]: INFO nova.compute.manager [None req-b1269a0f-59e8-4dca-9fde-96f070df9bb8 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Terminating instance [ 1424.649370] env[63379]: DEBUG nova.compute.manager [None req-b1269a0f-59e8-4dca-9fde-96f070df9bb8 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1424.649741] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b1269a0f-59e8-4dca-9fde-96f070df9bb8 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1424.652274] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62852558-1e52-4db1-9d3e-349903a2d7e8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.661633] env[63379]: DEBUG oslo_vmware.api [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f9a258-78eb-2c56-ae11-59bc2741409f, 'name': SearchDatastore_Task, 'duration_secs': 0.019869} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1424.662643] env[63379]: DEBUG oslo_concurrency.lockutils [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1424.663018] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] c999d64e-3f5b-4854-8b92-6d0d17f49dd7/c999d64e-3f5b-4854-8b92-6d0d17f49dd7.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1424.664617] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5352ff1d-a37b-408a-b10e-0c7dbc942347 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.670505] env[63379]: DEBUG oslo_concurrency.lockutils [None req-450f068e-6792-4f5d-b465-4f79110f9501 tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Acquiring lock "25090d85-cd10-44fc-aa9d-071ada14f249" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1424.671191] env[63379]: DEBUG oslo_concurrency.lockutils [None req-450f068e-6792-4f5d-b465-4f79110f9501 tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Lock "25090d85-cd10-44fc-aa9d-071ada14f249" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1424.671370] env[63379]: DEBUG oslo_concurrency.lockutils [None 
req-450f068e-6792-4f5d-b465-4f79110f9501 tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Acquiring lock "25090d85-cd10-44fc-aa9d-071ada14f249-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1424.672222] env[63379]: DEBUG oslo_concurrency.lockutils [None req-450f068e-6792-4f5d-b465-4f79110f9501 tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Lock "25090d85-cd10-44fc-aa9d-071ada14f249-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1424.672222] env[63379]: DEBUG oslo_concurrency.lockutils [None req-450f068e-6792-4f5d-b465-4f79110f9501 tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Lock "25090d85-cd10-44fc-aa9d-071ada14f249-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1424.673954] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1269a0f-59e8-4dca-9fde-96f070df9bb8 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1424.674808] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-51c6c154-88df-4bb7-9ee9-7268778e2fa7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.677423] env[63379]: INFO nova.compute.manager [None req-450f068e-6792-4f5d-b465-4f79110f9501 tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Terminating instance [ 1424.681362] env[63379]: DEBUG oslo_vmware.api [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Waiting for the task: (returnval){ [ 1424.681362] env[63379]: value = "task-1779092" [ 1424.681362] env[63379]: _type = "Task" [ 1424.681362] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.682319] env[63379]: DEBUG nova.compute.manager [None req-450f068e-6792-4f5d-b465-4f79110f9501 tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1424.682808] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-450f068e-6792-4f5d-b465-4f79110f9501 tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1424.683904] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64149215-03e3-4c7c-b257-8d689c6bb1d2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.692903] env[63379]: DEBUG oslo_vmware.api [None req-b1269a0f-59e8-4dca-9fde-96f070df9bb8 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Waiting for the task: (returnval){ [ 1424.692903] env[63379]: value = "task-1779093" [ 1424.692903] env[63379]: _type = "Task" [ 1424.692903] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.694726] env[63379]: DEBUG nova.policy [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '76ef14f3e15341408d2ac3bb139ff93d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '862e52cb5e924bbebb353a9ced8f5e80', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1424.708565] env[63379]: DEBUG oslo_vmware.api [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Task: {'id': task-1779092, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.708942] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-450f068e-6792-4f5d-b465-4f79110f9501 tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1424.711976] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5361a9f6-3e92-4854-a9c8-e967d95621fb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.718661] env[63379]: DEBUG oslo_vmware.api [None req-b1269a0f-59e8-4dca-9fde-96f070df9bb8 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1779093, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.724858] env[63379]: DEBUG oslo_vmware.api [None req-450f068e-6792-4f5d-b465-4f79110f9501 tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Waiting for the task: (returnval){ [ 1424.724858] env[63379]: value = "task-1779094" [ 1424.724858] env[63379]: _type = "Task" [ 1424.724858] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.735373] env[63379]: DEBUG oslo_vmware.api [None req-450f068e-6792-4f5d-b465-4f79110f9501 tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Task: {'id': task-1779094, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.964130] env[63379]: DEBUG oslo_concurrency.lockutils [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Acquiring lock "refresh_cache-41952d7b-ce23-4e9b-8843-bbac1d3099c1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1424.964455] env[63379]: DEBUG oslo_concurrency.lockutils [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Acquired lock "refresh_cache-41952d7b-ce23-4e9b-8843-bbac1d3099c1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1424.964667] env[63379]: DEBUG nova.network.neutron [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1424.986600] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d79c68bb-b07e-42ab-b6fd-1990e43fd95d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1425.033527] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7fc23ce3-c8c3-4aa3-a6b1-09b9c2cc579a tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1425.050861] env[63379]: DEBUG oslo_vmware.api [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779090, 'name': ReconfigVM_Task, 'duration_secs': 0.242097} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.051552] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Updating instance 'aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae' progress to 33 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1425.069627] env[63379]: DEBUG nova.compute.manager [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1425.110471] env[63379]: DEBUG oslo_vmware.api [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Task: {'id': task-1779088, 'name': ReconfigVM_Task, 'duration_secs': 1.302407} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.115589] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Reconfigured VM instance instance-00000014 to attach disk [datastore1] de671ba9-0d86-4f89-a6bd-ecea9ad0ba85/de671ba9-0d86-4f89-a6bd-ecea9ad0ba85.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1425.120742] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8332058e-db26-4ee8-8d82-158d882db6cb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.133822] env[63379]: DEBUG oslo_vmware.api [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Waiting for the task: (returnval){ [ 1425.133822] env[63379]: value = "task-1779095" [ 1425.133822] env[63379]: _type = "Task" [ 1425.133822] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.151643] env[63379]: DEBUG oslo_vmware.api [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Task: {'id': task-1779095, 'name': Rename_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.211419] env[63379]: DEBUG oslo_vmware.api [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Task: {'id': task-1779092, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.221896] env[63379]: DEBUG oslo_vmware.api [None req-b1269a0f-59e8-4dca-9fde-96f070df9bb8 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1779093, 'name': PowerOffVM_Task, 'duration_secs': 0.260682} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.222219] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1269a0f-59e8-4dca-9fde-96f070df9bb8 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1425.223065] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b1269a0f-59e8-4dca-9fde-96f070df9bb8 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1425.224833] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4deee73c-4056-4ae4-bbbe-2efbb6428260 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.238668] env[63379]: DEBUG oslo_vmware.api [None req-450f068e-6792-4f5d-b465-4f79110f9501 tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Task: {'id': task-1779094, 'name': PowerOffVM_Task, 'duration_secs': 0.224035} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.238955] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-450f068e-6792-4f5d-b465-4f79110f9501 tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1425.239143] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-450f068e-6792-4f5d-b465-4f79110f9501 tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1425.239401] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-82fd66dc-4cd6-4078-a652-b8ba0502edd7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.306860] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bd9a3b51-1fd2-4a0f-9755-14f62b5be011 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Acquiring lock "6b4e80fc-582f-432b-aa99-ec133127578e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1425.307147] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bd9a3b51-1fd2-4a0f-9755-14f62b5be011 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Lock "6b4e80fc-582f-432b-aa99-ec133127578e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1425.307426] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bd9a3b51-1fd2-4a0f-9755-14f62b5be011 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Acquiring lock "6b4e80fc-582f-432b-aa99-ec133127578e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1425.307756] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bd9a3b51-1fd2-4a0f-9755-14f62b5be011 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Lock "6b4e80fc-582f-432b-aa99-ec133127578e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1425.307804] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bd9a3b51-1fd2-4a0f-9755-14f62b5be011 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Lock "6b4e80fc-582f-432b-aa99-ec133127578e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1425.316376] env[63379]: INFO nova.compute.manager [None 
req-bd9a3b51-1fd2-4a0f-9755-14f62b5be011 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Terminating instance [ 1425.318907] env[63379]: DEBUG nova.compute.manager [None req-bd9a3b51-1fd2-4a0f-9755-14f62b5be011 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1425.319091] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-bd9a3b51-1fd2-4a0f-9755-14f62b5be011 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1425.320038] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cf9cdbb-160a-43fa-80cf-b65128b55533 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.329895] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd9a3b51-1fd2-4a0f-9755-14f62b5be011 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1425.330277] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a3ca9754-45aa-4a4b-ab5a-5cc07e600eb4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.341020] env[63379]: DEBUG oslo_vmware.api [None req-bd9a3b51-1fd2-4a0f-9755-14f62b5be011 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Waiting for the task: (returnval){ [ 1425.341020] env[63379]: value = "task-1779098" [ 1425.341020] env[63379]: _type = "Task" [ 1425.341020] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.348196] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b1269a0f-59e8-4dca-9fde-96f070df9bb8 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1425.348440] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b1269a0f-59e8-4dca-9fde-96f070df9bb8 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1425.348623] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1269a0f-59e8-4dca-9fde-96f070df9bb8 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Deleting the datastore file [datastore1] efc5b3b6-bed4-484c-8a0c-65810747382d {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1425.349025] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4392b9a9-a2d3-43d5-8bf9-3baad6af7cfd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.359749] env[63379]: DEBUG oslo_vmware.api [None req-bd9a3b51-1fd2-4a0f-9755-14f62b5be011 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Task: {'id': task-1779098, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.363165] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-450f068e-6792-4f5d-b465-4f79110f9501 tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1425.363419] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-450f068e-6792-4f5d-b465-4f79110f9501 tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1425.363554] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-450f068e-6792-4f5d-b465-4f79110f9501 tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Deleting the datastore file [datastore1] 25090d85-cd10-44fc-aa9d-071ada14f249 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1425.363809] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5f3ca3a6-0b5f-4b7d-be1a-4f5069646e34 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.370107] env[63379]: DEBUG oslo_vmware.api [None req-b1269a0f-59e8-4dca-9fde-96f070df9bb8 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Waiting for the task: (returnval){ [ 1425.370107] env[63379]: value = "task-1779099" [ 1425.370107] env[63379]: _type = "Task" [ 1425.370107] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.372019] env[63379]: DEBUG oslo_vmware.api [None req-450f068e-6792-4f5d-b465-4f79110f9501 tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Waiting for the task: (returnval){ [ 1425.372019] env[63379]: value = "task-1779100" [ 1425.372019] env[63379]: _type = "Task" [ 1425.372019] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.382850] env[63379]: DEBUG nova.network.neutron [req-60e710a9-a8dc-420f-924a-225b239ef10a req-009d3f6a-5e64-4966-9474-ce4b722302a0 service nova] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Updated VIF entry in instance network info cache for port c6139085-d9e8-416c-8a48-9c9e3c07eed1. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1425.383234] env[63379]: DEBUG nova.network.neutron [req-60e710a9-a8dc-420f-924a-225b239ef10a req-009d3f6a-5e64-4966-9474-ce4b722302a0 service nova] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Updating instance_info_cache with network_info: [{"id": "c6139085-d9e8-416c-8a48-9c9e3c07eed1", "address": "fa:16:3e:79:38:a5", "network": {"id": "4f28bd62-f71b-4455-b7f0-24f1ffefa11b", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1579310435-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e0420b33d0e4236b9750b79d94e57af", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb68953b-dee5-4d9d-b47b-277336ba76dc", "external-id": "nsx-vlan-transportzone-168", "segmentation_id": 168, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6139085-d9", "ovs_interfaceid": "c6139085-d9e8-416c-8a48-9c9e3c07eed1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1425.391239] env[63379]: DEBUG oslo_vmware.api [None req-b1269a0f-59e8-4dca-9fde-96f070df9bb8 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1779099, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.391499] env[63379]: DEBUG oslo_vmware.api [None req-450f068e-6792-4f5d-b465-4f79110f9501 tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Task: {'id': task-1779100, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.517489] env[63379]: DEBUG nova.network.neutron [-] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1425.536808] env[63379]: DEBUG nova.network.neutron [None req-2b6e627e-7c63-476f-b09d-34b0f372cfb9 tempest-ServerExternalEventsTest-341958889 tempest-ServerExternalEventsTest-341958889-project] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Updating instance_info_cache with network_info: [{"id": "a524aedc-254a-4394-836b-4136823591d8", "address": "fa:16:3e:63:fe:72", "network": {"id": "55f3848c-4d4f-4c83-a3e6-bc7a6f7af3ce", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.77", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eb95d75934bc4912a35f709406a98a65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa524aedc-25", "ovs_interfaceid": "a524aedc-254a-4394-836b-4136823591d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1425.558651] env[63379]: DEBUG nova.virt.hardware [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1425.558965] env[63379]: DEBUG nova.virt.hardware [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1425.559274] env[63379]: DEBUG nova.virt.hardware [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1425.559559] env[63379]: DEBUG nova.virt.hardware [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 
tempest-MigrationsAdminTest-2108201557-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1425.559738] env[63379]: DEBUG nova.virt.hardware [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1425.560219] env[63379]: DEBUG nova.virt.hardware [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1425.560219] env[63379]: DEBUG nova.virt.hardware [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1425.560315] env[63379]: DEBUG nova.virt.hardware [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1425.560506] env[63379]: DEBUG nova.virt.hardware [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1425.560695] env[63379]: DEBUG nova.virt.hardware [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1425.560918] env[63379]: DEBUG nova.virt.hardware [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1425.568779] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Reconfiguring VM instance instance-00000009 to detach disk 2000 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1425.569631] env[63379]: DEBUG nova.network.neutron [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Instance cache missing network info. 
{{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1425.575246] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2808ae8c-3fd9-4fd9-b968-618f13d41535 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.604842] env[63379]: DEBUG oslo_vmware.api [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Waiting for the task: (returnval){ [ 1425.604842] env[63379]: value = "task-1779101" [ 1425.604842] env[63379]: _type = "Task" [ 1425.604842] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.619206] env[63379]: DEBUG oslo_vmware.api [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779101, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.645248] env[63379]: DEBUG oslo_vmware.api [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Task: {'id': task-1779095, 'name': Rename_Task, 'duration_secs': 0.45359} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.648327] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1425.649387] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d1891b84-b4f5-4fea-b4dd-50c2eacd0961 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.660192] env[63379]: DEBUG oslo_vmware.api [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Waiting for the task: (returnval){ [ 1425.660192] env[63379]: value = "task-1779102" [ 1425.660192] env[63379]: _type = "Task" [ 1425.660192] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.667558] env[63379]: DEBUG nova.network.neutron [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Successfully created port: d22964f6-f2df-4a65-9d6f-8ed548989938 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1425.676650] env[63379]: DEBUG oslo_vmware.api [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Task: {'id': task-1779102, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.697980] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4615207f-e270-4c63-886e-775f7ae38a96 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Acquiring lock "0edadcca-042e-440b-985b-6338e20265fa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1425.698768] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4615207f-e270-4c63-886e-775f7ae38a96 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Lock "0edadcca-042e-440b-985b-6338e20265fa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1425.698768] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4615207f-e270-4c63-886e-775f7ae38a96 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Acquiring lock "0edadcca-042e-440b-985b-6338e20265fa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1425.698768] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4615207f-e270-4c63-886e-775f7ae38a96 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Lock "0edadcca-042e-440b-985b-6338e20265fa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1425.699259] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4615207f-e270-4c63-886e-775f7ae38a96 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Lock "0edadcca-042e-440b-985b-6338e20265fa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1425.701075] env[63379]: DEBUG oslo_vmware.api [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Task: {'id': task-1779092, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.695054} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.701466] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] c999d64e-3f5b-4854-8b92-6d0d17f49dd7/c999d64e-3f5b-4854-8b92-6d0d17f49dd7.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1425.701563] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1425.703062] env[63379]: INFO nova.compute.manager [None req-4615207f-e270-4c63-886e-775f7ae38a96 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Terminating instance [ 1425.705313] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-74302b1a-2bfa-4367-a1a6-9f397c67baca {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.708947] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c985d38-db1a-4056-8910-34ff384b51ec {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.713782] env[63379]: DEBUG nova.compute.manager [None req-4615207f-e270-4c63-886e-775f7ae38a96 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1425.714933] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4615207f-e270-4c63-886e-775f7ae38a96 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1425.715862] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f82c9b1-c172-4cc3-b125-0f5a61e39bdf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.730896] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f5757ce-9367-43e2-bb8f-3d59b0d096cf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.735545] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-4615207f-e270-4c63-886e-775f7ae38a96 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1425.736067] env[63379]: DEBUG oslo_vmware.api [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Waiting for the task: (returnval){ [ 1425.736067] env[63379]: value = "task-1779103" [ 1425.736067] env[63379]: _type = "Task" [ 1425.736067] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.739981] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-266d2b03-a12c-4859-a44e-7e00a3d3ba38 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.788599] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df77c232-617b-4e8b-a028-e8cb7bf8b872 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.795643] env[63379]: DEBUG oslo_vmware.api [None req-4615207f-e270-4c63-886e-775f7ae38a96 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Waiting for the task: (returnval){ [ 1425.795643] env[63379]: value = "task-1779104" [ 1425.795643] env[63379]: _type = "Task" [ 1425.795643] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.795879] env[63379]: DEBUG oslo_vmware.api [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Task: {'id': task-1779103, 'name': ExtendVirtualDisk_Task} progress is 50%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.807126] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64e5294f-d0e7-4f04-b32d-4983fd528adf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.817043] env[63379]: DEBUG oslo_vmware.api [None req-4615207f-e270-4c63-886e-775f7ae38a96 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1779104, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.829346] env[63379]: DEBUG nova.compute.provider_tree [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1425.852153] env[63379]: DEBUG oslo_vmware.api [None req-bd9a3b51-1fd2-4a0f-9755-14f62b5be011 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Task: {'id': task-1779098, 'name': PowerOffVM_Task, 'duration_secs': 0.230045} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.852153] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd9a3b51-1fd2-4a0f-9755-14f62b5be011 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1425.852322] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-bd9a3b51-1fd2-4a0f-9755-14f62b5be011 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1425.852609] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c7e4f04c-827d-4da3-95db-bdc54032e328 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.884991] env[63379]: DEBUG oslo_vmware.api [None req-b1269a0f-59e8-4dca-9fde-96f070df9bb8 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1779099, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.389504} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.888976] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1269a0f-59e8-4dca-9fde-96f070df9bb8 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1425.888976] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b1269a0f-59e8-4dca-9fde-96f070df9bb8 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1425.888976] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b1269a0f-59e8-4dca-9fde-96f070df9bb8 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1425.888976] env[63379]: INFO nova.compute.manager [None req-b1269a0f-59e8-4dca-9fde-96f070df9bb8 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Took 1.24 seconds to destroy the instance on the hypervisor. [ 1425.889765] env[63379]: DEBUG oslo.service.loopingcall [None req-b1269a0f-59e8-4dca-9fde-96f070df9bb8 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1425.889765] env[63379]: DEBUG oslo_vmware.api [None req-450f068e-6792-4f5d-b465-4f79110f9501 tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Task: {'id': task-1779100, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.383853} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.889830] env[63379]: DEBUG nova.compute.manager [-] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1425.889957] env[63379]: DEBUG nova.network.neutron [-] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1425.891766] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-450f068e-6792-4f5d-b465-4f79110f9501 tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1425.891766] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-450f068e-6792-4f5d-b465-4f79110f9501 tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1425.892418] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-450f068e-6792-4f5d-b465-4f79110f9501 tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1425.892418] env[63379]: INFO nova.compute.manager [None req-450f068e-6792-4f5d-b465-4f79110f9501 tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Took 1.21 seconds to destroy the instance on the hypervisor. [ 1425.892537] env[63379]: DEBUG oslo.service.loopingcall [None req-450f068e-6792-4f5d-b465-4f79110f9501 tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1425.893070] env[63379]: DEBUG oslo_concurrency.lockutils [req-60e710a9-a8dc-420f-924a-225b239ef10a req-009d3f6a-5e64-4966-9474-ce4b722302a0 service nova] Releasing lock "refresh_cache-c999d64e-3f5b-4854-8b92-6d0d17f49dd7" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1425.893491] env[63379]: DEBUG nova.compute.manager [-] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1425.893623] env[63379]: DEBUG nova.network.neutron [-] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1425.922902] env[63379]: DEBUG nova.network.neutron [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Updating instance_info_cache with network_info: [{"id": "89d7e5cf-c802-47c1-97bd-981796ed50c7", "address": "fa:16:3e:01:a2:40", "network": {"id": "42ec9777-27c5-4516-be87-12d549df72cd", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1493935153-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86e655baa29c4c88b8648d273f92ed4b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89d7e5cf-c8", "ovs_interfaceid": "89d7e5cf-c802-47c1-97bd-981796ed50c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1426.019978] env[63379]: INFO nova.compute.manager [-] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Took 1.42 seconds to deallocate network for instance. [ 1426.047024] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2b6e627e-7c63-476f-b09d-34b0f372cfb9 tempest-ServerExternalEventsTest-341958889 tempest-ServerExternalEventsTest-341958889-project] Releasing lock "refresh_cache-6b4e80fc-582f-432b-aa99-ec133127578e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1426.100584] env[63379]: DEBUG nova.compute.manager [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1426.117761] env[63379]: DEBUG oslo_vmware.api [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779101, 'name': ReconfigVM_Task, 'duration_secs': 0.231344} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.118223] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Reconfigured VM instance instance-00000009 to detach disk 2000 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1426.119366] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ef8ea30-6605-422d-b0f9-4477c571a07f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.153362] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Reconfiguring VM instance instance-00000009 to attach disk [datastore1] aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae/aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1426.157811] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aac6088b-d87d-49fd-844f-bdfe3e1f2e7f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.171105] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-bd9a3b51-1fd2-4a0f-9755-14f62b5be011 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1426.171233] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-bd9a3b51-1fd2-4a0f-9755-14f62b5be011 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1426.171420] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd9a3b51-1fd2-4a0f-9755-14f62b5be011 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Deleting the datastore file [datastore1] 6b4e80fc-582f-432b-aa99-ec133127578e {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1426.174771] env[63379]: DEBUG nova.virt.hardware [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=<?>,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-12-11T23:13:50Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1426.174771] env[63379]: DEBUG nova.virt.hardware [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1426.174771] env[63379]: DEBUG nova.virt.hardware [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1426.174771] env[63379]: DEBUG nova.virt.hardware [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1426.174771] env[63379]: DEBUG nova.virt.hardware [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1426.174771] env[63379]: DEBUG nova.virt.hardware [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1426.175036] env[63379]: DEBUG nova.virt.hardware [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1426.175648] env[63379]: DEBUG nova.virt.hardware [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1426.175648] env[63379]: DEBUG nova.virt.hardware [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:501}} [ 1426.175648] env[63379]: DEBUG nova.virt.hardware [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1426.178243] env[63379]: DEBUG nova.virt.hardware [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1426.178243] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c3a619f2-638b-4183-90b8-5e3e8a85854b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.178968] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da62a589-8a44-4c97-88ab-c8637390bd91 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.195251] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1c78165-9321-4e0f-9f8b-611a879307f4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.199446] env[63379]: DEBUG oslo_vmware.api [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Task: {'id': task-1779102, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.202260] env[63379]: DEBUG oslo_vmware.api [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Waiting for the task: (returnval){ [ 1426.202260] env[63379]: value = "task-1779107" [ 1426.202260] env[63379]: _type = "Task" [ 1426.202260] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.202678] env[63379]: DEBUG oslo_vmware.api [None req-bd9a3b51-1fd2-4a0f-9755-14f62b5be011 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Waiting for the task: (returnval){ [ 1426.202678] env[63379]: value = "task-1779106" [ 1426.202678] env[63379]: _type = "Task" [ 1426.202678] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.224923] env[63379]: DEBUG nova.compute.manager [req-ed04d49a-3d0b-44a4-9114-77dae7c4387d req-a39f3f4e-5eb5-461a-91c8-db98ff3de397 service nova] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Received event network-vif-deleted-5bb23315-a5dc-438f-bb8e-fc90360f23ec {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1426.234823] env[63379]: DEBUG oslo_vmware.api [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779107, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.235537] env[63379]: DEBUG oslo_vmware.api [None req-bd9a3b51-1fd2-4a0f-9755-14f62b5be011 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Task: {'id': task-1779106, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.253983] env[63379]: DEBUG oslo_vmware.api [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Task: {'id': task-1779103, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088169} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.254348] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1426.255663] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d093986f-daa0-4466-b438-3edb0cfe878a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.282166] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Reconfiguring VM instance instance-00000015 to attach disk [datastore1] c999d64e-3f5b-4854-8b92-6d0d17f49dd7/c999d64e-3f5b-4854-8b92-6d0d17f49dd7.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1426.282820] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-42ad8ee9-b45f-4589-969c-82b7e2ea2ed9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.310033] env[63379]: DEBUG oslo_vmware.api [None req-4615207f-e270-4c63-886e-775f7ae38a96 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1779104, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.311644] env[63379]: DEBUG oslo_vmware.api [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Waiting for the task: (returnval){ [ 1426.311644] env[63379]: value = "task-1779108" [ 1426.311644] env[63379]: _type = "Task" [ 1426.311644] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.321500] env[63379]: DEBUG oslo_vmware.api [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Task: {'id': task-1779108, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.336793] env[63379]: DEBUG nova.scheduler.client.report [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1426.403824] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Acquiring lock "bc7baa1a-f65d-41d4-ad86-de041fbb2306" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1426.404089] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Lock "bc7baa1a-f65d-41d4-ad86-de041fbb2306" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1426.426391] env[63379]: DEBUG oslo_concurrency.lockutils [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Releasing lock "refresh_cache-41952d7b-ce23-4e9b-8843-bbac1d3099c1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1426.426756] env[63379]: DEBUG nova.compute.manager [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Instance network_info: |[{"id": "89d7e5cf-c802-47c1-97bd-981796ed50c7", "address": "fa:16:3e:01:a2:40", "network": {"id": "42ec9777-27c5-4516-be87-12d549df72cd", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1493935153-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86e655baa29c4c88b8648d273f92ed4b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89d7e5cf-c8", "ovs_interfaceid": "89d7e5cf-c802-47c1-97bd-981796ed50c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1426.427167] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:01:a2:40', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f8442aa5-73db-4599-8564-b98a6ea26b9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '89d7e5cf-c802-47c1-97bd-981796ed50c7', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1426.435114] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Creating folder: Project (86e655baa29c4c88b8648d273f92ed4b). Parent ref: group-v369214. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1426.436112] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2c9221e4-9beb-46a0-a60c-c0a805659674 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.451170] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Created folder: Project (86e655baa29c4c88b8648d273f92ed4b) in parent group-v369214. [ 1426.451398] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Creating folder: Instances. Parent ref: group-v369282. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1426.451643] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-53e1f6a5-9d7b-42a7-95be-bf356d9b6c4b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.463158] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Created folder: Instances in parent group-v369282. [ 1426.463158] env[63379]: DEBUG oslo.service.loopingcall [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1426.463158] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1426.463158] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1504702c-6835-4dc2-ae80-d280da83ace0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.485221] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1426.485221] env[63379]: value = "task-1779111" [ 1426.485221] env[63379]: _type = "Task" [ 1426.485221] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.496353] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779111, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.526396] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7f9f28cb-7c5b-4e6d-921c-d0df3998b98d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1426.688433] env[63379]: DEBUG oslo_vmware.api [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Task: {'id': task-1779102, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.717111] env[63379]: DEBUG nova.compute.manager [req-4e3c0c7e-2afa-45d5-af9e-ca0f9374a8d6 req-4cb86541-b289-4184-9b17-210e26352a65 service nova] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Received event network-vif-deleted-901f3c6f-920f-4eed-b3b6-2ba116322aae {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1426.717349] env[63379]: DEBUG nova.compute.manager [req-4e3c0c7e-2afa-45d5-af9e-ca0f9374a8d6 req-4cb86541-b289-4184-9b17-210e26352a65 service nova] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Received event network-vif-plugged-89d7e5cf-c802-47c1-97bd-981796ed50c7 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1426.717549] env[63379]: DEBUG oslo_concurrency.lockutils [req-4e3c0c7e-2afa-45d5-af9e-ca0f9374a8d6 req-4cb86541-b289-4184-9b17-210e26352a65 service nova] Acquiring lock "41952d7b-ce23-4e9b-8843-bbac1d3099c1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1426.717898] env[63379]: DEBUG oslo_concurrency.lockutils [req-4e3c0c7e-2afa-45d5-af9e-ca0f9374a8d6 req-4cb86541-b289-4184-9b17-210e26352a65 service nova] Lock "41952d7b-ce23-4e9b-8843-bbac1d3099c1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1426.717968] env[63379]: DEBUG oslo_concurrency.lockutils [req-4e3c0c7e-2afa-45d5-af9e-ca0f9374a8d6 req-4cb86541-b289-4184-9b17-210e26352a65 service nova] Lock "41952d7b-ce23-4e9b-8843-bbac1d3099c1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1426.718105] env[63379]: DEBUG nova.compute.manager [req-4e3c0c7e-2afa-45d5-af9e-ca0f9374a8d6 req-4cb86541-b289-4184-9b17-210e26352a65 service nova] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] No waiting events found dispatching network-vif-plugged-89d7e5cf-c802-47c1-97bd-981796ed50c7 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1426.718364] env[63379]: WARNING nova.compute.manager [req-4e3c0c7e-2afa-45d5-af9e-ca0f9374a8d6 req-4cb86541-b289-4184-9b17-210e26352a65 service nova] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Received unexpected event network-vif-plugged-89d7e5cf-c802-47c1-97bd-981796ed50c7 for instance with vm_state building and task_state spawning. [ 1426.718448] env[63379]: DEBUG nova.compute.manager [req-4e3c0c7e-2afa-45d5-af9e-ca0f9374a8d6 req-4cb86541-b289-4184-9b17-210e26352a65 service nova] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Received event network-changed-89d7e5cf-c802-47c1-97bd-981796ed50c7 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1426.718696] env[63379]: DEBUG nova.compute.manager [req-4e3c0c7e-2afa-45d5-af9e-ca0f9374a8d6 req-4cb86541-b289-4184-9b17-210e26352a65 service nova] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Refreshing instance network info cache due to event network-changed-89d7e5cf-c802-47c1-97bd-981796ed50c7. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1426.718823] env[63379]: DEBUG oslo_concurrency.lockutils [req-4e3c0c7e-2afa-45d5-af9e-ca0f9374a8d6 req-4cb86541-b289-4184-9b17-210e26352a65 service nova] Acquiring lock "refresh_cache-41952d7b-ce23-4e9b-8843-bbac1d3099c1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1426.718960] env[63379]: DEBUG oslo_concurrency.lockutils [req-4e3c0c7e-2afa-45d5-af9e-ca0f9374a8d6 req-4cb86541-b289-4184-9b17-210e26352a65 service nova] Acquired lock "refresh_cache-41952d7b-ce23-4e9b-8843-bbac1d3099c1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1426.719190] env[63379]: DEBUG nova.network.neutron [req-4e3c0c7e-2afa-45d5-af9e-ca0f9374a8d6 req-4cb86541-b289-4184-9b17-210e26352a65 service nova] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Refreshing network info cache for port 89d7e5cf-c802-47c1-97bd-981796ed50c7 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1426.727566] env[63379]: DEBUG oslo_vmware.api [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779107, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.731845] env[63379]: DEBUG oslo_vmware.api [None req-bd9a3b51-1fd2-4a0f-9755-14f62b5be011 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Task: {'id': task-1779106, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152526} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.733973] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd9a3b51-1fd2-4a0f-9755-14f62b5be011 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1426.733973] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-bd9a3b51-1fd2-4a0f-9755-14f62b5be011 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1426.733973] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-bd9a3b51-1fd2-4a0f-9755-14f62b5be011 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1426.734128] env[63379]: INFO nova.compute.manager [None req-bd9a3b51-1fd2-4a0f-9755-14f62b5be011 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Took 1.42 seconds to destroy the instance on the hypervisor. 
[ 1426.734398] env[63379]: DEBUG oslo.service.loopingcall [None req-bd9a3b51-1fd2-4a0f-9755-14f62b5be011 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1426.734867] env[63379]: DEBUG nova.compute.manager [-] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1426.735019] env[63379]: DEBUG nova.network.neutron [-] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1426.747617] env[63379]: DEBUG nova.network.neutron [-] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1426.826391] env[63379]: DEBUG oslo_vmware.api [None req-4615207f-e270-4c63-886e-775f7ae38a96 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1779104, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.831555] env[63379]: DEBUG oslo_vmware.api [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Task: {'id': task-1779108, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.842818] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.787s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1426.843570] env[63379]: DEBUG nova.compute.manager [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1426.847796] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 19.380s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1426.848990] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1426.849306] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63379) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1426.849775] env[63379]: DEBUG oslo_concurrency.lockutils [None req-53229a5f-28ee-41dd-9ab7-b9bb941559d6 tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.841s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1426.850342] env[63379]: DEBUG nova.objects.instance [None req-53229a5f-28ee-41dd-9ab7-b9bb941559d6 tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Lazy-loading 'resources' on Instance uuid 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1426.854797] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33d2f461-af6c-48b2-aebb-0b1e244a59ad {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.869719] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33e4a463-93f9-4f8f-aa59-10caf99a2bca {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.890845] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f82cbb2-58c5-4dcc-adaa-54633ba997ad {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.900629] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2953676-ce93-4979-bcd9-60c000821d67 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.942717] env[63379]: DEBUG nova.network.neutron [-] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1426.945278] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181065MB free_disk=163GB free_vcpus=48 pci_devices=None {{(pid=63379) 
_report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1426.945434] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1426.996918] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779111, 'name': CreateVM_Task, 'duration_secs': 0.419232} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.997168] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1426.997883] env[63379]: DEBUG oslo_concurrency.lockutils [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1426.998179] env[63379]: DEBUG oslo_concurrency.lockutils [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1426.998408] env[63379]: DEBUG oslo_concurrency.lockutils [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1426.998678] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-490b6de8-12bc-43d5-9715-3ccc2e4d2a74 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.005125] env[63379]: DEBUG oslo_vmware.api [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Waiting for the task: (returnval){ [ 1427.005125] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5294620d-9e87-b1b4-9c4f-201003928cae" [ 1427.005125] env[63379]: _type = "Task" [ 1427.005125] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.015292] env[63379]: DEBUG oslo_vmware.api [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5294620d-9e87-b1b4-9c4f-201003928cae, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.194447] env[63379]: DEBUG oslo_vmware.api [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Task: {'id': task-1779102, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.215178] env[63379]: DEBUG oslo_vmware.api [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779107, 'name': ReconfigVM_Task, 'duration_secs': 0.923269} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.215793] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Reconfigured VM instance instance-00000009 to attach disk [datastore1] aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae/aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1427.215793] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Updating instance 'aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae' progress to 50 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1427.252248] env[63379]: INFO nova.compute.manager [-] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Took 1.36 seconds to deallocate network for instance. [ 1427.311094] env[63379]: DEBUG oslo_vmware.api [None req-4615207f-e270-4c63-886e-775f7ae38a96 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1779104, 'name': PowerOffVM_Task, 'duration_secs': 1.119733} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.313108] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-4615207f-e270-4c63-886e-775f7ae38a96 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1427.313108] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4615207f-e270-4c63-886e-775f7ae38a96 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1427.313108] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-427c9997-de82-49e3-bf0e-95945634bf28 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.322874] env[63379]: DEBUG oslo_vmware.api [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Task: {'id': task-1779108, 'name': ReconfigVM_Task, 'duration_secs': 0.740082} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.323599] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Reconfigured VM instance instance-00000015 to attach disk [datastore1] c999d64e-3f5b-4854-8b92-6d0d17f49dd7/c999d64e-3f5b-4854-8b92-6d0d17f49dd7.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1427.327101] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f5b541d9-8d57-4b17-8af1-f260aba5f90c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.337025] env[63379]: DEBUG oslo_vmware.api [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Waiting for the task: (returnval){ [ 1427.337025] env[63379]: value = "task-1779114" [ 1427.337025] env[63379]: _type = "Task" [ 1427.337025] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.347344] env[63379]: DEBUG oslo_vmware.api [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Task: {'id': task-1779114, 'name': Rename_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.357454] env[63379]: DEBUG nova.compute.utils [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1427.359161] env[63379]: DEBUG nova.compute.manager [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1427.359310] env[63379]: DEBUG nova.network.neutron [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1427.406791] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4615207f-e270-4c63-886e-775f7ae38a96 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1427.407040] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4615207f-e270-4c63-886e-775f7ae38a96 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1427.407240] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-4615207f-e270-4c63-886e-775f7ae38a96 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Deleting the datastore file [datastore1] 0edadcca-042e-440b-985b-6338e20265fa {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1427.407509] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-44506d95-66e4-45e2-b18a-9415f12914d1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.415491] env[63379]: DEBUG oslo_vmware.api [None req-4615207f-e270-4c63-886e-775f7ae38a96 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Waiting for the task: (returnval){ [ 1427.415491] env[63379]: value = "task-1779115" [ 1427.415491] env[63379]: _type = "Task" [ 1427.415491] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.424336] env[63379]: DEBUG oslo_vmware.api [None req-4615207f-e270-4c63-886e-775f7ae38a96 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1779115, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.446220] env[63379]: INFO nova.compute.manager [-] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Took 1.56 seconds to deallocate network for instance. 
[ 1427.468070] env[63379]: DEBUG nova.policy [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a9a12fb931894c6eacbe28b8be929333', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3c2d8da8e40749adbe05070135cea8e0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1427.519576] env[63379]: DEBUG oslo_vmware.api [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5294620d-9e87-b1b4-9c4f-201003928cae, 'name': SearchDatastore_Task, 'duration_secs': 0.01245} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.519576] env[63379]: DEBUG oslo_concurrency.lockutils [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1427.519576] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1427.519576] env[63379]: DEBUG oslo_concurrency.lockutils [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1427.519576] env[63379]: DEBUG oslo_concurrency.lockutils [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1427.519733] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1427.519875] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-163783eb-05a8-4e49-9e18-1c488dd03e66 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.529884] env[63379]: 
DEBUG nova.network.neutron [req-4e3c0c7e-2afa-45d5-af9e-ca0f9374a8d6 req-4cb86541-b289-4184-9b17-210e26352a65 service nova] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Updated VIF entry in instance network info cache for port 89d7e5cf-c802-47c1-97bd-981796ed50c7. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1427.530265] env[63379]: DEBUG nova.network.neutron [req-4e3c0c7e-2afa-45d5-af9e-ca0f9374a8d6 req-4cb86541-b289-4184-9b17-210e26352a65 service nova] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Updating instance_info_cache with network_info: [{"id": "89d7e5cf-c802-47c1-97bd-981796ed50c7", "address": "fa:16:3e:01:a2:40", "network": {"id": "42ec9777-27c5-4516-be87-12d549df72cd", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1493935153-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86e655baa29c4c88b8648d273f92ed4b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89d7e5cf-c8", "ovs_interfaceid": "89d7e5cf-c802-47c1-97bd-981796ed50c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1427.535659] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1427.535845] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1427.537823] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc44ee51-086e-448a-88d3-27566ab3510e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.547934] env[63379]: DEBUG oslo_vmware.api [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Waiting for the task: (returnval){ [ 1427.547934] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52efa77d-0a6b-c2a8-1acd-4b89d45fd5d1" [ 1427.547934] env[63379]: _type = "Task" [ 1427.547934] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.562114] env[63379]: DEBUG oslo_vmware.api [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52efa77d-0a6b-c2a8-1acd-4b89d45fd5d1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.690209] env[63379]: DEBUG oslo_vmware.api [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Task: {'id': task-1779102, 'name': PowerOnVM_Task, 'duration_secs': 1.542092} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.690704] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1427.691009] env[63379]: INFO nova.compute.manager [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Took 13.35 seconds to spawn the instance on the hypervisor. [ 1427.691286] env[63379]: DEBUG nova.compute.manager [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1427.692414] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5c4c18e-1b5b-4d43-b18b-6d81914ba03f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.723738] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf5d22bb-771f-4f1d-b281-7b1b5f2c66fd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.758112] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3576bca-eea2-446e-b0ca-6947aed3d363 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.763023] env[63379]: DEBUG nova.network.neutron [-] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1427.764254] env[63379]: DEBUG oslo_concurrency.lockutils [None req-450f068e-6792-4f5d-b465-4f79110f9501 tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1427.786143] env[63379]: DEBUG 
nova.virt.vmwareapi.vmops [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Updating instance 'aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae' progress to 67 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1427.847530] env[63379]: DEBUG oslo_vmware.api [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Task: {'id': task-1779114, 'name': Rename_Task, 'duration_secs': 0.274175} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.847748] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1427.848011] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5da5d2ad-b907-406e-895b-556cd47acf61 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.857376] env[63379]: DEBUG oslo_vmware.api [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Waiting for the task: (returnval){ [ 1427.857376] env[63379]: value = "task-1779116" [ 1427.857376] env[63379]: _type = "Task" [ 1427.857376] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.864899] env[63379]: DEBUG nova.compute.manager [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1427.872664] env[63379]: DEBUG oslo_vmware.api [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Task: {'id': task-1779116, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.927804] env[63379]: DEBUG oslo_vmware.api [None req-4615207f-e270-4c63-886e-775f7ae38a96 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Task: {'id': task-1779115, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.350204} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.927804] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-4615207f-e270-4c63-886e-775f7ae38a96 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1427.927955] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4615207f-e270-4c63-886e-775f7ae38a96 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1427.928151] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4615207f-e270-4c63-886e-775f7ae38a96 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1427.928326] env[63379]: INFO nova.compute.manager [None req-4615207f-e270-4c63-886e-775f7ae38a96 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Took 2.21 seconds to destroy the instance on the hypervisor. [ 1427.929087] env[63379]: DEBUG oslo.service.loopingcall [None req-4615207f-e270-4c63-886e-775f7ae38a96 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1427.931373] env[63379]: DEBUG nova.compute.manager [-] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1427.931478] env[63379]: DEBUG nova.network.neutron [-] [instance: 0edadcca-042e-440b-985b-6338e20265fa] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1427.951923] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b1269a0f-59e8-4dca-9fde-96f070df9bb8 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1427.964264] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-247d08f0-97f5-4846-837b-4e4f34c6cac5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.976429] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e9eacb5-587c-41f6-9927-b43623217647 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.014457] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d075c72-fc2d-4ed5-8c03-dfaaa4b0a0cc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.024742] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2a98322-7ec2-4565-af66-9b85a14b4454 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.040529] env[63379]: DEBUG oslo_concurrency.lockutils [req-4e3c0c7e-2afa-45d5-af9e-ca0f9374a8d6 req-4cb86541-b289-4184-9b17-210e26352a65 service nova] Releasing lock "refresh_cache-41952d7b-ce23-4e9b-8843-bbac1d3099c1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1428.041848] env[63379]: DEBUG nova.compute.provider_tree [None req-53229a5f-28ee-41dd-9ab7-b9bb941559d6 tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1428.060377] env[63379]: DEBUG oslo_vmware.api [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52efa77d-0a6b-c2a8-1acd-4b89d45fd5d1, 'name': SearchDatastore_Task, 'duration_secs': 0.011929} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1428.061248] env[63379]: DEBUG nova.network.neutron [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Successfully updated port: d22964f6-f2df-4a65-9d6f-8ed548989938 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1428.063696] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c204b09b-e23b-4194-8f7f-0617af72e35d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.072464] env[63379]: DEBUG oslo_vmware.api [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Waiting for the task: (returnval){ [ 1428.072464] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]526c7d08-6541-cf2b-136a-6c2721a4a176" [ 1428.072464] env[63379]: _type = "Task" [ 1428.072464] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1428.085126] env[63379]: DEBUG oslo_vmware.api [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]526c7d08-6541-cf2b-136a-6c2721a4a176, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.185833] env[63379]: DEBUG nova.network.neutron [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Successfully created port: d2e80ecc-8309-4e64-b962-762c8535bf0a {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1428.222628] env[63379]: INFO nova.compute.manager [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Took 32.22 seconds to build instance. [ 1428.266742] env[63379]: INFO nova.compute.manager [-] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Took 1.53 seconds to deallocate network for instance. [ 1428.374658] env[63379]: DEBUG oslo_vmware.api [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Task: {'id': task-1779116, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.375370] env[63379]: DEBUG nova.network.neutron [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Port e034314c-72fb-4187-9c6b-1cd2e95aa97a binding to destination host cpu-1 is already ACTIVE {{(pid=63379) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1428.508104] env[63379]: DEBUG nova.compute.manager [req-379d9bfc-95b6-4cc4-aa23-12496b1dcb4a req-da384fe9-35be-45fa-97c8-0862800efc3c service nova] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Received event network-vif-deleted-65e3bc3b-bfed-4dd6-be59-87481a211014 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1428.545082] env[63379]: DEBUG nova.scheduler.client.report [None req-53229a5f-28ee-41dd-9ab7-b9bb941559d6 tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1428.565173] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Acquiring lock "refresh_cache-8a7a3a54-ca4f-4860-a976-7d6b1212b9c9" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1428.565347] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Acquired lock "refresh_cache-8a7a3a54-ca4f-4860-a976-7d6b1212b9c9" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1428.565518] env[63379]: DEBUG nova.network.neutron [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1428.589144] env[63379]: DEBUG oslo_vmware.api [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]526c7d08-6541-cf2b-136a-6c2721a4a176, 'name': SearchDatastore_Task, 'duration_secs': 0.024001} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1428.589419] env[63379]: DEBUG oslo_concurrency.lockutils [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1428.589668] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 41952d7b-ce23-4e9b-8843-bbac1d3099c1/41952d7b-ce23-4e9b-8843-bbac1d3099c1.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1428.591028] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c2250eb3-7337-4d84-9375-2b8d6cc96695 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.599582] env[63379]: DEBUG oslo_vmware.api [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Waiting for the task: (returnval){ [ 1428.599582] env[63379]: value = "task-1779117" [ 1428.599582] env[63379]: _type = "Task" [ 1428.599582] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1428.609431] env[63379]: DEBUG oslo_vmware.api [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': task-1779117, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.725943] env[63379]: DEBUG oslo_concurrency.lockutils [None req-871caaeb-ef6d-44c1-81e8-4aed59ceffe1 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Lock "de671ba9-0d86-4f89-a6bd-ecea9ad0ba85" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.768s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1428.775193] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bd9a3b51-1fd2-4a0f-9755-14f62b5be011 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1428.875901] env[63379]: DEBUG oslo_vmware.api [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Task: {'id': task-1779116, 'name': PowerOnVM_Task, 'duration_secs': 0.875437} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1428.876855] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1428.876855] env[63379]: INFO nova.compute.manager [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Took 11.97 seconds to spawn the instance on the hypervisor. [ 1428.876855] env[63379]: DEBUG nova.compute.manager [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1428.878348] env[63379]: DEBUG nova.compute.manager [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1428.886266] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e20b00c2-90cf-4a47-af42-0c4678101d04 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.909031] env[63379]: DEBUG nova.compute.manager [req-30268185-f888-4fcb-b78c-897f176246b0 req-6dc486f0-49a3-49ae-839e-2ea3d39e8810 service nova] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Received event network-vif-deleted-120116f2-2c59-4c67-b5ec-2aad96939540 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1428.909031] env[63379]: DEBUG nova.compute.manager [req-30268185-f888-4fcb-b78c-897f176246b0 req-6dc486f0-49a3-49ae-839e-2ea3d39e8810 service nova] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Received event network-vif-deleted-a524aedc-254a-4394-836b-4136823591d8 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1428.909176] env[63379]: DEBUG nova.compute.manager [req-30268185-f888-4fcb-b78c-897f176246b0 req-6dc486f0-49a3-49ae-839e-2ea3d39e8810 service nova] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Received event network-vif-plugged-d22964f6-f2df-4a65-9d6f-8ed548989938 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1428.909533] env[63379]: DEBUG oslo_concurrency.lockutils [req-30268185-f888-4fcb-b78c-897f176246b0 req-6dc486f0-49a3-49ae-839e-2ea3d39e8810 service nova] Acquiring lock "8a7a3a54-ca4f-4860-a976-7d6b1212b9c9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1428.911620] env[63379]: DEBUG oslo_concurrency.lockutils [req-30268185-f888-4fcb-b78c-897f176246b0 req-6dc486f0-49a3-49ae-839e-2ea3d39e8810 service nova] Lock "8a7a3a54-ca4f-4860-a976-7d6b1212b9c9-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1428.911620] env[63379]: DEBUG oslo_concurrency.lockutils [req-30268185-f888-4fcb-b78c-897f176246b0 req-6dc486f0-49a3-49ae-839e-2ea3d39e8810 service nova] Lock "8a7a3a54-ca4f-4860-a976-7d6b1212b9c9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1428.911620] env[63379]: DEBUG nova.compute.manager [req-30268185-f888-4fcb-b78c-897f176246b0 req-6dc486f0-49a3-49ae-839e-2ea3d39e8810 service nova] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] No waiting events found dispatching network-vif-plugged-d22964f6-f2df-4a65-9d6f-8ed548989938 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1428.912459] env[63379]: WARNING nova.compute.manager [req-30268185-f888-4fcb-b78c-897f176246b0 req-6dc486f0-49a3-49ae-839e-2ea3d39e8810 service nova] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Received unexpected event network-vif-plugged-d22964f6-f2df-4a65-9d6f-8ed548989938 for instance with vm_state building and task_state spawning. [ 1428.912715] env[63379]: DEBUG nova.compute.manager [req-30268185-f888-4fcb-b78c-897f176246b0 req-6dc486f0-49a3-49ae-839e-2ea3d39e8810 service nova] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Received event network-changed-d22964f6-f2df-4a65-9d6f-8ed548989938 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1428.913145] env[63379]: DEBUG nova.compute.manager [req-30268185-f888-4fcb-b78c-897f176246b0 req-6dc486f0-49a3-49ae-839e-2ea3d39e8810 service nova] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Refreshing instance network info cache due to event network-changed-d22964f6-f2df-4a65-9d6f-8ed548989938. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1428.913325] env[63379]: DEBUG oslo_concurrency.lockutils [req-30268185-f888-4fcb-b78c-897f176246b0 req-6dc486f0-49a3-49ae-839e-2ea3d39e8810 service nova] Acquiring lock "refresh_cache-8a7a3a54-ca4f-4860-a976-7d6b1212b9c9" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1428.926946] env[63379]: DEBUG nova.virt.hardware [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1428.927818] env[63379]: DEBUG nova.virt.hardware [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1428.927818] env[63379]: DEBUG nova.virt.hardware [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1428.927818] env[63379]: DEBUG nova.virt.hardware [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1428.927818] env[63379]: DEBUG nova.virt.hardware [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1428.928012] env[63379]: DEBUG nova.virt.hardware [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1428.928205] env[63379]: DEBUG nova.virt.hardware [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1428.928385] env[63379]: DEBUG nova.virt.hardware [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 
tempest-ServersTestJSON-443652715-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1428.928578] env[63379]: DEBUG nova.virt.hardware [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1428.928750] env[63379]: DEBUG nova.virt.hardware [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1428.928942] env[63379]: DEBUG nova.virt.hardware [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1428.930306] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7726442f-8c37-4b4f-a90f-5794249abf97 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.944285] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e45dd833-d92b-4699-996f-7562073db5ad {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.968376] env[63379]: DEBUG nova.network.neutron [-] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1429.054716] env[63379]: DEBUG oslo_concurrency.lockutils [None req-53229a5f-28ee-41dd-9ab7-b9bb941559d6 tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.205s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1429.058155] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5909a1dd-7f47-4cbc-b1bc-f5b57ad661d1 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.393s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1429.060055] env[63379]: DEBUG nova.objects.instance [None req-5909a1dd-7f47-4cbc-b1bc-f5b57ad661d1 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Lazy-loading 'resources' on Instance uuid 30908171-e1b9-4e20-830e-419ff6d9a0fa {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1429.098780] env[63379]: INFO nova.scheduler.client.report [None req-53229a5f-28ee-41dd-9ab7-b9bb941559d6 tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Deleted allocations for instance 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd [ 1429.122255] env[63379]: DEBUG oslo_vmware.api [None req-31545581-8efe-4c81-8d8b-de07ffdca242 
tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': task-1779117, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.130470] env[63379]: DEBUG nova.network.neutron [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1429.230281] env[63379]: DEBUG nova.compute.manager [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1429.391380] env[63379]: DEBUG nova.network.neutron [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Updating instance_info_cache with network_info: [{"id": "d22964f6-f2df-4a65-9d6f-8ed548989938", "address": "fa:16:3e:17:97:ba", "network": {"id": "55f3848c-4d4f-4c83-a3e6-bc7a6f7af3ce", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.29", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eb95d75934bc4912a35f709406a98a65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd22964f6-f2", "ovs_interfaceid": "d22964f6-f2df-4a65-9d6f-8ed548989938", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1429.408812] env[63379]: DEBUG oslo_concurrency.lockutils [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Acquiring lock "aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1429.409097] env[63379]: DEBUG oslo_concurrency.lockutils [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Lock "aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1429.412744] env[63379]: DEBUG oslo_concurrency.lockutils [None 
req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Lock "aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1429.425382] env[63379]: INFO nova.compute.manager [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Took 31.95 seconds to build instance. [ 1429.472827] env[63379]: INFO nova.compute.manager [-] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Took 1.54 seconds to deallocate network for instance. [ 1429.614430] env[63379]: DEBUG oslo_vmware.api [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': task-1779117, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.571633} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.616397] env[63379]: DEBUG oslo_concurrency.lockutils [None req-53229a5f-28ee-41dd-9ab7-b9bb941559d6 tempest-ServerDiagnosticsNegativeTest-302737711 tempest-ServerDiagnosticsNegativeTest-302737711-project-member] Lock "5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.507s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1429.616397] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 41952d7b-ce23-4e9b-8843-bbac1d3099c1/41952d7b-ce23-4e9b-8843-bbac1d3099c1.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1429.616397] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1429.616774] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-82004d79-fcf9-4ea6-b5e0-871fb4238b26 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.626274] env[63379]: DEBUG oslo_vmware.api [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Waiting for the task: (returnval){ [ 1429.626274] env[63379]: value = "task-1779119" [ 1429.626274] env[63379]: _type = "Task" [ 1429.626274] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.638233] env[63379]: DEBUG oslo_vmware.api [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': task-1779119, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.758036] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1429.814273] env[63379]: DEBUG oslo_concurrency.lockutils [None req-93b79982-385d-4ae8-bd60-0c3d501e4c59 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Acquiring lock "de671ba9-0d86-4f89-a6bd-ecea9ad0ba85" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1429.814342] env[63379]: DEBUG oslo_concurrency.lockutils [None req-93b79982-385d-4ae8-bd60-0c3d501e4c59 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Lock "de671ba9-0d86-4f89-a6bd-ecea9ad0ba85" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1429.814800] env[63379]: DEBUG oslo_concurrency.lockutils [None req-93b79982-385d-4ae8-bd60-0c3d501e4c59 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Acquiring lock "de671ba9-0d86-4f89-a6bd-ecea9ad0ba85-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1429.815583] env[63379]: DEBUG oslo_concurrency.lockutils [None req-93b79982-385d-4ae8-bd60-0c3d501e4c59 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Lock "de671ba9-0d86-4f89-a6bd-ecea9ad0ba85-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1429.815583] env[63379]: DEBUG oslo_concurrency.lockutils [None req-93b79982-385d-4ae8-bd60-0c3d501e4c59 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Lock "de671ba9-0d86-4f89-a6bd-ecea9ad0ba85-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1429.821009] env[63379]: INFO nova.compute.manager [None req-93b79982-385d-4ae8-bd60-0c3d501e4c59 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Terminating instance [ 1429.824457] env[63379]: 
DEBUG nova.compute.manager [None req-93b79982-385d-4ae8-bd60-0c3d501e4c59 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1429.824673] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-93b79982-385d-4ae8-bd60-0c3d501e4c59 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1429.825630] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e894752a-ad6c-4330-82d4-21fcfabfd9d0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.840495] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-93b79982-385d-4ae8-bd60-0c3d501e4c59 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1429.840947] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5ea1442b-5b3f-49ba-a968-b63a90910dd8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.851076] env[63379]: DEBUG oslo_vmware.api [None req-93b79982-385d-4ae8-bd60-0c3d501e4c59 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Waiting for the task: (returnval){ [ 1429.851076] env[63379]: value = "task-1779120" [ 1429.851076] env[63379]: _type = "Task" [ 1429.851076] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.872647] env[63379]: DEBUG oslo_vmware.api [None req-93b79982-385d-4ae8-bd60-0c3d501e4c59 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Task: {'id': task-1779120, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.905074] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Releasing lock "refresh_cache-8a7a3a54-ca4f-4860-a976-7d6b1212b9c9" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1429.905074] env[63379]: DEBUG nova.compute.manager [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Instance network_info: |[{"id": "d22964f6-f2df-4a65-9d6f-8ed548989938", "address": "fa:16:3e:17:97:ba", "network": {"id": "55f3848c-4d4f-4c83-a3e6-bc7a6f7af3ce", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.29", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eb95d75934bc4912a35f709406a98a65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd22964f6-f2", "ovs_interfaceid": "d22964f6-f2df-4a65-9d6f-8ed548989938", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1429.905074] env[63379]: DEBUG oslo_concurrency.lockutils [req-30268185-f888-4fcb-b78c-897f176246b0 req-6dc486f0-49a3-49ae-839e-2ea3d39e8810 service nova] Acquired lock "refresh_cache-8a7a3a54-ca4f-4860-a976-7d6b1212b9c9" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1429.905074] env[63379]: DEBUG nova.network.neutron [req-30268185-f888-4fcb-b78c-897f176246b0 req-6dc486f0-49a3-49ae-839e-2ea3d39e8810 service nova] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Refreshing network info cache for port d22964f6-f2df-4a65-9d6f-8ed548989938 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1429.906989] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:97:ba', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea00b53a-9c9b-4592-ab95-7e10473f338d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd22964f6-f2df-4a65-9d6f-8ed548989938', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1429.916739] env[63379]: DEBUG oslo.service.loopingcall [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 
tempest-ServersAdminNegativeTestJSON-132676374-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1429.922587] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1429.923844] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-734996f7-781b-4504-8404-b38060cfea3c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.947249] env[63379]: DEBUG oslo_concurrency.lockutils [None req-df20a4ec-9564-4709-adf3-4730cd2eeb91 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Lock "c999d64e-3f5b-4854-8b92-6d0d17f49dd7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.891s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1429.956808] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1429.956808] env[63379]: value = "task-1779121" [ 1429.956808] env[63379]: _type = "Task" [ 1429.956808] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.977120] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779121, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.980394] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4615207f-e270-4c63-886e-775f7ae38a96 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1430.146682] env[63379]: DEBUG oslo_vmware.api [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': task-1779119, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.089663} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1430.146999] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1430.148178] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e743c0da-2f65-41e2-81f7-19f4bc816e85 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.175855] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Reconfiguring VM instance instance-00000016 to attach disk [datastore1] 41952d7b-ce23-4e9b-8843-bbac1d3099c1/41952d7b-ce23-4e9b-8843-bbac1d3099c1.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1430.180977] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-74a5977d-2543-419b-82be-24d48da9d1dd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.202986] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e280dd71-6a8c-4cb1-955d-deaa591ddf3f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.214031] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da9e870a-f556-4ee2-aa74-257069c00e84 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.217398] env[63379]: DEBUG oslo_vmware.api [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Waiting for the task: (returnval){ [ 1430.217398] env[63379]: value = "task-1779122" [ 1430.217398] env[63379]: _type = "Task" [ 1430.217398] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.254243] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f892731-793c-4158-a4ba-2e1d8a340c4b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.260992] env[63379]: DEBUG oslo_vmware.api [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': task-1779122, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.268896] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d8ee454-1622-45f1-8fa0-002341d55537 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.286341] env[63379]: DEBUG nova.compute.provider_tree [None req-5909a1dd-7f47-4cbc-b1bc-f5b57ad661d1 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1430.316743] env[63379]: DEBUG nova.network.neutron [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Successfully updated port: d2e80ecc-8309-4e64-b962-762c8535bf0a {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1430.351270] env[63379]: DEBUG nova.network.neutron [req-30268185-f888-4fcb-b78c-897f176246b0 req-6dc486f0-49a3-49ae-839e-2ea3d39e8810 service nova] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Updated VIF entry in instance network info cache for port d22964f6-f2df-4a65-9d6f-8ed548989938. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1430.351270] env[63379]: DEBUG nova.network.neutron [req-30268185-f888-4fcb-b78c-897f176246b0 req-6dc486f0-49a3-49ae-839e-2ea3d39e8810 service nova] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Updating instance_info_cache with network_info: [{"id": "d22964f6-f2df-4a65-9d6f-8ed548989938", "address": "fa:16:3e:17:97:ba", "network": {"id": "55f3848c-4d4f-4c83-a3e6-bc7a6f7af3ce", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.29", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eb95d75934bc4912a35f709406a98a65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd22964f6-f2", "ovs_interfaceid": "d22964f6-f2df-4a65-9d6f-8ed548989938", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1430.364890] env[63379]: DEBUG oslo_vmware.api [None req-93b79982-385d-4ae8-bd60-0c3d501e4c59 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Task: {'id': task-1779120, 'name': PowerOffVM_Task, 'duration_secs': 0.417717} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1430.364890] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-93b79982-385d-4ae8-bd60-0c3d501e4c59 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1430.365096] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-93b79982-385d-4ae8-bd60-0c3d501e4c59 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1430.365361] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8566f337-1526-49d3-95e0-2324cdb3a28e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.453298] env[63379]: DEBUG nova.compute.manager [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1430.467192] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-93b79982-385d-4ae8-bd60-0c3d501e4c59 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1430.467426] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-93b79982-385d-4ae8-bd60-0c3d501e4c59 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1430.467642] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-93b79982-385d-4ae8-bd60-0c3d501e4c59 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Deleting the datastore file [datastore1] de671ba9-0d86-4f89-a6bd-ecea9ad0ba85 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1430.472155] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cee883de-16e9-4afe-a4f7-0a7baf3d5fd7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.474511] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779121, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.483404] env[63379]: DEBUG oslo_vmware.api [None req-93b79982-385d-4ae8-bd60-0c3d501e4c59 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Waiting for the task: (returnval){ [ 1430.483404] env[63379]: value = "task-1779124" [ 1430.483404] env[63379]: _type = "Task" [ 1430.483404] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.493366] env[63379]: DEBUG oslo_vmware.api [None req-93b79982-385d-4ae8-bd60-0c3d501e4c59 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Task: {'id': task-1779124, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.508930] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b6d271ca-ef28-45e1-850d-73097d3eaf57 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Acquiring lock "c999d64e-3f5b-4854-8b92-6d0d17f49dd7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1430.509107] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b6d271ca-ef28-45e1-850d-73097d3eaf57 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Lock "c999d64e-3f5b-4854-8b92-6d0d17f49dd7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1430.510663] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b6d271ca-ef28-45e1-850d-73097d3eaf57 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Acquiring lock "c999d64e-3f5b-4854-8b92-6d0d17f49dd7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1430.510663] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b6d271ca-ef28-45e1-850d-73097d3eaf57 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Lock "c999d64e-3f5b-4854-8b92-6d0d17f49dd7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1430.510663] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b6d271ca-ef28-45e1-850d-73097d3eaf57 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Lock "c999d64e-3f5b-4854-8b92-6d0d17f49dd7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1430.514943] env[63379]: INFO nova.compute.manager [None req-b6d271ca-ef28-45e1-850d-73097d3eaf57 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Terminating instance [ 1430.517817] env[63379]: DEBUG nova.compute.manager [None req-b6d271ca-ef28-45e1-850d-73097d3eaf57 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Start destroying the instance on the hypervisor. 
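The terminate path above is serialized with oslo.concurrency locks: one lock on the instance UUID around do_terminate_instance, plus a short-lived "<uuid>-events" lock while pending instance events are cleared. A minimal sketch of that locking pattern, with the UUID and the guarded work as placeholders rather than values from this log:

    # Minimal sketch of the two oslo.concurrency locking styles seen in the
    # entries above; the UUID and the guarded bodies are placeholders.
    from oslo_concurrency import lockutils

    INSTANCE_UUID = "00000000-0000-0000-0000-000000000000"  # placeholder


    @lockutils.synchronized(INSTANCE_UUID)
    def do_terminate_instance():
        """Runs with the per-instance lock held (the 'acquired by ... waited' lines)."""
        # The "<uuid>-events" lock guards the pending-event table while it is
        # cleared, mirroring the clear_events_for_instance entries above.
        with lockutils.lock(INSTANCE_UUID + "-events"):
            pending_events = {}   # placeholder for the InstanceEvents state
            pending_events.clear()
        # ... continue with the hypervisor shutdown recorded in the next entries.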
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1430.518081] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b6d271ca-ef28-45e1-850d-73097d3eaf57 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1430.518847] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-861bd65a-8ebe-4557-b860-3254c8d40b9f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.522935] env[63379]: DEBUG oslo_concurrency.lockutils [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Acquiring lock "refresh_cache-aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1430.522935] env[63379]: DEBUG oslo_concurrency.lockutils [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Acquired lock "refresh_cache-aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1430.523152] env[63379]: DEBUG nova.network.neutron [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1430.531855] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6d271ca-ef28-45e1-850d-73097d3eaf57 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1430.532212] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d72abf1b-6647-41e2-9865-5ab7699dcbf7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.541807] env[63379]: DEBUG oslo_vmware.api [None req-b6d271ca-ef28-45e1-850d-73097d3eaf57 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Waiting for the task: (returnval){ [ 1430.541807] env[63379]: value = "task-1779125" [ 1430.541807] env[63379]: _type = "Task" [ 1430.541807] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.552591] env[63379]: DEBUG oslo_vmware.api [None req-b6d271ca-ef28-45e1-850d-73097d3eaf57 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Task: {'id': task-1779125, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.734591] env[63379]: DEBUG oslo_vmware.api [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': task-1779122, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.766830] env[63379]: DEBUG nova.compute.manager [req-0cfd296d-25db-4c63-b1b5-4b0d9dac08ff req-0ca95d22-9a1e-445f-aca8-c7abf2db8bb9 service nova] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Received event network-vif-deleted-fc0b60ef-5c6d-4d2a-9318-02840aeb7595 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1430.790709] env[63379]: DEBUG nova.scheduler.client.report [None req-5909a1dd-7f47-4cbc-b1bc-f5b57ad661d1 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1430.819297] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Acquiring lock "refresh_cache-941ac23c-6aa9-4ed1-840a-326423b7cbc0" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1430.819297] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Acquired lock "refresh_cache-941ac23c-6aa9-4ed1-840a-326423b7cbc0" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1430.819433] env[63379]: DEBUG nova.network.neutron [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1430.854889] env[63379]: DEBUG oslo_concurrency.lockutils [req-30268185-f888-4fcb-b78c-897f176246b0 req-6dc486f0-49a3-49ae-839e-2ea3d39e8810 service nova] Releasing lock "refresh_cache-8a7a3a54-ca4f-4860-a976-7d6b1212b9c9" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1430.973946] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779121, 'name': CreateVM_Task, 'duration_secs': 0.761545} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1430.974278] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1430.974915] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1430.975107] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1430.975457] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1430.975798] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-affe8bd1-9efe-4beb-a477-72b02046b84c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.983512] env[63379]: DEBUG oslo_vmware.api [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Waiting for the task: (returnval){ [ 1430.983512] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5291bb74-d633-6058-3642-6c02916c6353" [ 1430.983512] env[63379]: _type = "Task" [ 1430.983512] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.984686] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1430.997767] env[63379]: DEBUG oslo_vmware.api [None req-93b79982-385d-4ae8-bd60-0c3d501e4c59 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Task: {'id': task-1779124, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.340417} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1431.001282] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-93b79982-385d-4ae8-bd60-0c3d501e4c59 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1431.001551] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-93b79982-385d-4ae8-bd60-0c3d501e4c59 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1431.001721] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-93b79982-385d-4ae8-bd60-0c3d501e4c59 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1431.001934] env[63379]: INFO nova.compute.manager [None req-93b79982-385d-4ae8-bd60-0c3d501e4c59 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1431.002257] env[63379]: DEBUG oslo.service.loopingcall [None req-93b79982-385d-4ae8-bd60-0c3d501e4c59 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1431.002520] env[63379]: DEBUG oslo_vmware.api [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5291bb74-d633-6058-3642-6c02916c6353, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.002700] env[63379]: DEBUG nova.compute.manager [-] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1431.002813] env[63379]: DEBUG nova.network.neutron [-] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1431.054151] env[63379]: DEBUG oslo_vmware.api [None req-b6d271ca-ef28-45e1-850d-73097d3eaf57 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Task: {'id': task-1779125, 'name': PowerOffVM_Task, 'duration_secs': 0.313786} completed successfully. 
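Each asynchronous vSphere operation above (PowerOffVM_Task, DeleteDatastoreFile_Task) follows the same shape: a task is started through the oslo.vmware session and then polled until it finishes, which is what the repeated "progress is N%" and "completed successfully" lines record. A rough sketch of that pattern, assuming an already established session and a resolved VM reference (both placeholders, not objects from this log):

    # Sketch of the "start a vSphere task, then poll it" pattern behind the
    # PowerOffVM_Task / DeleteDatastoreFile_Task entries above. `session` is
    # assumed to be an established oslo_vmware.api.VMwareAPISession and
    # `vm_ref` a ManagedObjectReference; neither is constructed here.

    def power_off_and_wait(session, vm_ref):
        # invoke_api() proxies the SOAP call (VirtualMachine.PowerOffVM_Task in
        # the log) through the vim client and returns the task reference.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # wait_for_task() polls the task, logging progress until it reaches a
        # terminal state, then returns the task info or raises on error.
        return session.wait_for_task(task)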
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1431.054534] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6d271ca-ef28-45e1-850d-73097d3eaf57 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1431.054734] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b6d271ca-ef28-45e1-850d-73097d3eaf57 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1431.055027] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9fee6a14-4712-45d4-9e60-46f084cfaea2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.231355] env[63379]: DEBUG oslo_vmware.api [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': task-1779122, 'name': ReconfigVM_Task, 'duration_secs': 0.825546} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1431.232957] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Reconfigured VM instance instance-00000016 to attach disk [datastore1] 41952d7b-ce23-4e9b-8843-bbac1d3099c1/41952d7b-ce23-4e9b-8843-bbac1d3099c1.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1431.234723] env[63379]: DEBUG nova.compute.manager [req-2904de3d-59bd-4fc2-88f2-d12d21b565f8 req-d64086f2-a469-4fea-805f-b97f9dd2dfb7 service nova] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Received event network-vif-plugged-d2e80ecc-8309-4e64-b962-762c8535bf0a {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1431.234933] env[63379]: DEBUG oslo_concurrency.lockutils [req-2904de3d-59bd-4fc2-88f2-d12d21b565f8 req-d64086f2-a469-4fea-805f-b97f9dd2dfb7 service nova] Acquiring lock "941ac23c-6aa9-4ed1-840a-326423b7cbc0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1431.235160] env[63379]: DEBUG oslo_concurrency.lockutils [req-2904de3d-59bd-4fc2-88f2-d12d21b565f8 req-d64086f2-a469-4fea-805f-b97f9dd2dfb7 service nova] Lock "941ac23c-6aa9-4ed1-840a-326423b7cbc0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1431.235376] env[63379]: DEBUG oslo_concurrency.lockutils [req-2904de3d-59bd-4fc2-88f2-d12d21b565f8 req-d64086f2-a469-4fea-805f-b97f9dd2dfb7 service nova] Lock "941ac23c-6aa9-4ed1-840a-326423b7cbc0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1431.236071] env[63379]: DEBUG nova.compute.manager [req-2904de3d-59bd-4fc2-88f2-d12d21b565f8 req-d64086f2-a469-4fea-805f-b97f9dd2dfb7 service nova] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] No waiting events found dispatching network-vif-plugged-d2e80ecc-8309-4e64-b962-762c8535bf0a {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1431.236071] env[63379]: WARNING nova.compute.manager [req-2904de3d-59bd-4fc2-88f2-d12d21b565f8 req-d64086f2-a469-4fea-805f-b97f9dd2dfb7 service nova] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Received unexpected event network-vif-plugged-d2e80ecc-8309-4e64-b962-762c8535bf0a for instance with vm_state building and task_state spawning. [ 1431.236071] env[63379]: DEBUG nova.compute.manager [req-2904de3d-59bd-4fc2-88f2-d12d21b565f8 req-d64086f2-a469-4fea-805f-b97f9dd2dfb7 service nova] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Received event network-changed-d2e80ecc-8309-4e64-b962-762c8535bf0a {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1431.236071] env[63379]: DEBUG nova.compute.manager [req-2904de3d-59bd-4fc2-88f2-d12d21b565f8 req-d64086f2-a469-4fea-805f-b97f9dd2dfb7 service nova] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Refreshing instance network info cache due to event network-changed-d2e80ecc-8309-4e64-b962-762c8535bf0a. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1431.236308] env[63379]: DEBUG oslo_concurrency.lockutils [req-2904de3d-59bd-4fc2-88f2-d12d21b565f8 req-d64086f2-a469-4fea-805f-b97f9dd2dfb7 service nova] Acquiring lock "refresh_cache-941ac23c-6aa9-4ed1-840a-326423b7cbc0" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1431.236572] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3e3fd60e-dc03-4d29-9532-dc5a8a6c910d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.247666] env[63379]: DEBUG oslo_vmware.api [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Waiting for the task: (returnval){ [ 1431.247666] env[63379]: value = "task-1779128" [ 1431.247666] env[63379]: _type = "Task" [ 1431.247666] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.259100] env[63379]: DEBUG oslo_vmware.api [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': task-1779128, 'name': Rename_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.299910] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5909a1dd-7f47-4cbc-b1bc-f5b57ad661d1 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.242s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1431.306665] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.804s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1431.309302] env[63379]: INFO nova.compute.claims [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1431.314062] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b6d271ca-ef28-45e1-850d-73097d3eaf57 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1431.314389] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b6d271ca-ef28-45e1-850d-73097d3eaf57 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1431.314638] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6d271ca-ef28-45e1-850d-73097d3eaf57 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Deleting the datastore file [datastore1] c999d64e-3f5b-4854-8b92-6d0d17f49dd7 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1431.315456] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c8335bb4-8ebc-4746-89c0-bc14be387dbf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.332011] env[63379]: DEBUG oslo_vmware.api [None req-b6d271ca-ef28-45e1-850d-73097d3eaf57 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Waiting for the task: (returnval){ [ 1431.332011] env[63379]: value = "task-1779129" [ 1431.332011] env[63379]: _type = "Task" [ 1431.332011] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.334299] env[63379]: INFO nova.scheduler.client.report [None req-5909a1dd-7f47-4cbc-b1bc-f5b57ad661d1 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Deleted allocations for instance 30908171-e1b9-4e20-830e-419ff6d9a0fa [ 1431.354749] env[63379]: DEBUG oslo_vmware.api [None req-b6d271ca-ef28-45e1-850d-73097d3eaf57 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Task: {'id': task-1779129, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.401013] env[63379]: DEBUG nova.network.neutron [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1431.498566] env[63379]: DEBUG oslo_vmware.api [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5291bb74-d633-6058-3642-6c02916c6353, 'name': SearchDatastore_Task, 'duration_secs': 0.017538} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1431.501456] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1431.501753] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1431.502014] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1431.502338] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1431.502432] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 
tempest-ServersAdminNegativeTestJSON-132676374-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1431.502756] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-be3fe591-00c2-49b0-b5f2-66297760bd5d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.514171] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1431.514476] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1431.515349] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cba9720e-9d9e-4c29-971b-75cb0429f964 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.526564] env[63379]: DEBUG oslo_vmware.api [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Waiting for the task: (returnval){ [ 1431.526564] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52ce38dd-cb74-fd2a-5e94-3150a8f19e44" [ 1431.526564] env[63379]: _type = "Task" [ 1431.526564] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.536103] env[63379]: DEBUG oslo_vmware.api [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52ce38dd-cb74-fd2a-5e94-3150a8f19e44, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.544087] env[63379]: DEBUG nova.network.neutron [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Updating instance_info_cache with network_info: [{"id": "e034314c-72fb-4187-9c6b-1cd2e95aa97a", "address": "fa:16:3e:d2:92:4e", "network": {"id": "55f3848c-4d4f-4c83-a3e6-bc7a6f7af3ce", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.250", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eb95d75934bc4912a35f709406a98a65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape034314c-72", "ovs_interfaceid": "e034314c-72fb-4187-9c6b-1cd2e95aa97a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1431.679224] env[63379]: DEBUG nova.network.neutron [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Updating instance_info_cache with network_info: [{"id": "d2e80ecc-8309-4e64-b962-762c8535bf0a", "address": "fa:16:3e:3c:58:f1", "network": {"id": "12566b3a-74ae-4644-915d-0710cc41de61", "bridge": "br-int", "label": "tempest-ServersTestJSON-1681247215-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3c2d8da8e40749adbe05070135cea8e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4225eb1f-0af4-4ed4-8e3d-de822eb6d4ea", "external-id": "nsx-vlan-transportzone-40", "segmentation_id": 40, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2e80ecc-83", "ovs_interfaceid": "d2e80ecc-8309-4e64-b962-762c8535bf0a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1431.765386] env[63379]: DEBUG oslo_vmware.api [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': task-1779128, 'name': Rename_Task, 'duration_secs': 0.177397} completed successfully. 
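The instance_info_cache entries above are lists of VIF dictionaries whose fixed addresses sit under network -> subnets -> ips. A small helper along these lines (plain dictionary traversal, not a Nova API) is enough to summarize one of those cache entries:

    # Reduce a cached network_info list, like the ones logged above, to
    # (port id, MAC, fixed IPs) tuples.
    def summarize_network_info(network_info):
        summary = []
        for vif in network_info:
            fixed_ips = [
                ip["address"]
                for subnet in vif.get("network", {}).get("subnets", [])
                for ip in subnet.get("ips", [])
            ]
            summary.append((vif.get("id"), vif.get("address"), fixed_ips))
        return summary


    # Shape taken from the cache entry above, values abbreviated:
    example = [{
        "id": "e034314c-72fb-4187-9c6b-1cd2e95aa97a",
        "address": "fa:16:3e:d2:92:4e",
        "network": {"subnets": [{"ips": [{"address": "192.168.233.250"}]}]},
    }]
    print(summarize_network_info(example))
    # -> [('e034314c-72fb-4187-9c6b-1cd2e95aa97a', 'fa:16:3e:d2:92:4e', ['192.168.233.250'])]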
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1431.767415] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1431.770225] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b735b03b-16e7-4e19-ab5f-587ffd04ed22 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.778927] env[63379]: DEBUG oslo_vmware.api [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Waiting for the task: (returnval){ [ 1431.778927] env[63379]: value = "task-1779130" [ 1431.778927] env[63379]: _type = "Task" [ 1431.778927] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.794684] env[63379]: DEBUG oslo_vmware.api [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': task-1779130, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.852377] env[63379]: DEBUG oslo_vmware.api [None req-b6d271ca-ef28-45e1-850d-73097d3eaf57 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Task: {'id': task-1779129, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.248075} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1431.853138] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5909a1dd-7f47-4cbc-b1bc-f5b57ad661d1 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Lock "30908171-e1b9-4e20-830e-419ff6d9a0fa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.413s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1431.854672] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6d271ca-ef28-45e1-850d-73097d3eaf57 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1431.854965] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b6d271ca-ef28-45e1-850d-73097d3eaf57 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1431.856033] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b6d271ca-ef28-45e1-850d-73097d3eaf57 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1431.856033] env[63379]: INFO nova.compute.manager [None req-b6d271ca-ef28-45e1-850d-73097d3eaf57 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Took 1.34 seconds to destroy the instance on the hypervisor. [ 1431.856033] env[63379]: DEBUG oslo.service.loopingcall [None req-b6d271ca-ef28-45e1-850d-73097d3eaf57 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1431.856033] env[63379]: DEBUG nova.compute.manager [-] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1431.856312] env[63379]: DEBUG nova.network.neutron [-] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1432.040892] env[63379]: DEBUG oslo_vmware.api [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52ce38dd-cb74-fd2a-5e94-3150a8f19e44, 'name': SearchDatastore_Task, 'duration_secs': 0.014352} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.040892] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5fbd1b21-4565-4ba7-a7eb-e8d8b895f099 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.048300] env[63379]: DEBUG oslo_concurrency.lockutils [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Releasing lock "refresh_cache-aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1432.052868] env[63379]: DEBUG oslo_vmware.api [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Waiting for the task: (returnval){ [ 1432.052868] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5263f6ac-a701-efbf-5169-fd34018e2bf1" [ 1432.052868] env[63379]: _type = "Task" [ 1432.052868] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1432.064033] env[63379]: DEBUG oslo_vmware.api [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5263f6ac-a701-efbf-5169-fd34018e2bf1, 'name': SearchDatastore_Task, 'duration_secs': 0.009891} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.064033] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1432.064033] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9/8a7a3a54-ca4f-4860-a976-7d6b1212b9c9.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1432.064033] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7d5aff03-8782-43eb-a5f2-097ff4057908 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.072032] env[63379]: DEBUG oslo_vmware.api [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Waiting for the task: (returnval){ [ 1432.072032] env[63379]: value = "task-1779131" [ 1432.072032] env[63379]: _type = "Task" [ 1432.072032] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1432.084748] env[63379]: DEBUG oslo_vmware.api [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Task: {'id': task-1779131, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.181574] env[63379]: DEBUG nova.network.neutron [-] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1432.183560] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Releasing lock "refresh_cache-941ac23c-6aa9-4ed1-840a-326423b7cbc0" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1432.183560] env[63379]: DEBUG nova.compute.manager [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Instance network_info: |[{"id": "d2e80ecc-8309-4e64-b962-762c8535bf0a", "address": "fa:16:3e:3c:58:f1", "network": {"id": "12566b3a-74ae-4644-915d-0710cc41de61", "bridge": "br-int", "label": "tempest-ServersTestJSON-1681247215-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3c2d8da8e40749adbe05070135cea8e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4225eb1f-0af4-4ed4-8e3d-de822eb6d4ea", "external-id": "nsx-vlan-transportzone-40", "segmentation_id": 40, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2e80ecc-83", "ovs_interfaceid": "d2e80ecc-8309-4e64-b962-762c8535bf0a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1432.183997] env[63379]: DEBUG oslo_concurrency.lockutils [req-2904de3d-59bd-4fc2-88f2-d12d21b565f8 req-d64086f2-a469-4fea-805f-b97f9dd2dfb7 service nova] Acquired lock "refresh_cache-941ac23c-6aa9-4ed1-840a-326423b7cbc0" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1432.184217] env[63379]: DEBUG nova.network.neutron [req-2904de3d-59bd-4fc2-88f2-d12d21b565f8 req-d64086f2-a469-4fea-805f-b97f9dd2dfb7 service nova] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Refreshing network info cache for port d2e80ecc-8309-4e64-b962-762c8535bf0a {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1432.185224] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Instance 
VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3c:58:f1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4225eb1f-0af4-4ed4-8e3d-de822eb6d4ea', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd2e80ecc-8309-4e64-b962-762c8535bf0a', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1432.195017] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Creating folder: Project (3c2d8da8e40749adbe05070135cea8e0). Parent ref: group-v369214. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1432.196667] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e11dd3fc-e551-4d33-aae3-3ccbebc13701 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.211517] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Created folder: Project (3c2d8da8e40749adbe05070135cea8e0) in parent group-v369214. [ 1432.211856] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Creating folder: Instances. Parent ref: group-v369287. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1432.212111] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d362fe13-f2f6-4988-9e10-78a6647064ed {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.227074] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Created folder: Instances in parent group-v369287. [ 1432.227425] env[63379]: DEBUG oslo.service.loopingcall [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1432.227898] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1432.228156] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4a4bb698-c937-4681-9c81-c3a0b9b6a1c3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.258730] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1432.258730] env[63379]: value = "task-1779134" [ 1432.258730] env[63379]: _type = "Task" [ 1432.258730] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1432.268885] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779134, 'name': CreateVM_Task} progress is 0%. 
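Building the VM for instance 941ac23c-6aa9-4ed1-840a-326423b7cbc0 above is a layout step followed by an asynchronous create: a "Project (<tenant id>)" folder and an "Instances" folder are created, then Folder.CreateVM_Task is invoked and polled. A rough sketch of those calls through an oslo.vmware session, where the session, parent folder, resource pool and config spec are placeholders the driver would already hold:

    # Sketch of the CreateFolder / CreateVM_Task sequence in the entries above.
    # `session`, `parent_folder_ref`, `res_pool_ref` and `config_spec` are
    # placeholders; only the call shape is meant to mirror the log.

    def create_instance_vm(session, parent_folder_ref, res_pool_ref,
                           config_spec, project_id):
        # Folder.CreateFolder for "Project (<id>)", then "Instances" under it.
        project_folder = session.invoke_api(
            session.vim, 'CreateFolder', parent_folder_ref,
            name='Project (%s)' % project_id)
        instances_folder = session.invoke_api(
            session.vim, 'CreateFolder', project_folder, name='Instances')
        # Folder.CreateVM_Task on the leaf folder, then poll it (the
        # CreateVM_Task progress lines that follow).
        task = session.invoke_api(
            session.vim, 'CreateVM_Task', instances_folder,
            config=config_spec, pool=res_pool_ref)
        return session.wait_for_task(task)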
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.290363] env[63379]: DEBUG oslo_vmware.api [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': task-1779130, 'name': PowerOnVM_Task, 'duration_secs': 0.485023} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.290649] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1432.290853] env[63379]: INFO nova.compute.manager [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Took 8.94 seconds to spawn the instance on the hypervisor. [ 1432.291048] env[63379]: DEBUG nova.compute.manager [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1432.291932] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da0bdcb8-46c5-407d-bf10-8b4da4c7aa7c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.584465] env[63379]: DEBUG oslo_vmware.api [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Task: {'id': task-1779131, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.593257] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a806c6b0-a959-4650-a04c-8065b084952a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.618012] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22130302-9006-4c18-bc9e-d9baaba8f882 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.627168] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Updating instance 'aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae' progress to 83 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1432.686168] env[63379]: INFO nova.compute.manager [-] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Took 1.68 seconds to deallocate network for instance. [ 1432.774954] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779134, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.822032] env[63379]: INFO nova.compute.manager [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Took 29.95 seconds to build instance. [ 1432.884151] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a39b6a44-b20c-46b4-827b-1755521bf9fb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.892494] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f56b927-b13b-46c8-855a-874be88274e9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.935055] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dbd5465-6045-486a-9bf2-b9f92a3f51a1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.940631] env[63379]: DEBUG nova.compute.manager [req-57aff0f0-a8bd-4630-b223-02d033b3a1af req-ab4ca658-a38d-431f-b5a6-64b2fc720dd7 service nova] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Received event network-vif-deleted-55f75417-a04f-44de-a21a-20527e069280 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1432.949169] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6de044d8-f376-4317-a356-b0f137801523 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.968100] env[63379]: DEBUG nova.compute.provider_tree [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1433.085086] env[63379]: DEBUG oslo_vmware.api [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Task: {'id': task-1779131, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.537046} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1433.085086] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9/8a7a3a54-ca4f-4860-a976-7d6b1212b9c9.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1433.085320] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1433.085540] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a1d7e72e-f190-4de5-ac83-1a09662bfe6a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.091942] env[63379]: DEBUG oslo_vmware.api [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Waiting for the task: (returnval){ [ 1433.091942] env[63379]: value = "task-1779136" [ 1433.091942] env[63379]: _type = "Task" [ 1433.091942] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1433.101662] env[63379]: DEBUG oslo_vmware.api [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Task: {'id': task-1779136, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.139064] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1433.139402] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-51e05eea-a8c4-4a91-b496-93a63005076d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.147879] env[63379]: DEBUG oslo_vmware.api [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Waiting for the task: (returnval){ [ 1433.147879] env[63379]: value = "task-1779137" [ 1433.147879] env[63379]: _type = "Task" [ 1433.147879] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1433.157166] env[63379]: DEBUG oslo_vmware.api [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779137, 'name': PowerOnVM_Task} progress is 0%. 
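The spawn in these entries boots from the shared image cache on datastore1: the cached VMDK is copied into the instance directory and the copied root disk is then extended (here to 1048576 KB) before the VM is powered on. A rough sketch of that disk handling, with the session, datacenter reference and datastore paths as placeholders rather than objects from this log:

    # Sketch of the CopyVirtualDisk_Task / ExtendVirtualDisk_Task pair from the
    # entries above. `session`, `dc_ref` and the datastore paths are placeholders.

    def clone_and_extend_root_disk(session, dc_ref, cached_vmdk, instance_vmdk,
                                   new_size_gb):
        disk_mgr = session.vim.service_content.virtualDiskManager
        # Copy [datastore1] devstack-image-cache_base/<image>.vmdk into the
        # instance directory (the CopyVirtualDisk_Task progress lines).
        copy_task = session.invoke_api(
            session.vim, 'CopyVirtualDisk_Task', disk_mgr,
            sourceName=cached_vmdk, sourceDatacenter=dc_ref,
            destName=instance_vmdk, destDatacenter=dc_ref)
        session.wait_for_task(copy_task)
        # Grow the copied root disk to the flavor size
        # (the ExtendVirtualDisk_Task entry).
        extend_task = session.invoke_api(
            session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
            name=instance_vmdk, datacenter=dc_ref,
            newCapacityKb=new_size_gb * 1024 * 1024, eagerZero=False)
        session.wait_for_task(extend_task)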
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.162492] env[63379]: DEBUG nova.network.neutron [req-2904de3d-59bd-4fc2-88f2-d12d21b565f8 req-d64086f2-a469-4fea-805f-b97f9dd2dfb7 service nova] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Updated VIF entry in instance network info cache for port d2e80ecc-8309-4e64-b962-762c8535bf0a. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1433.162826] env[63379]: DEBUG nova.network.neutron [req-2904de3d-59bd-4fc2-88f2-d12d21b565f8 req-d64086f2-a469-4fea-805f-b97f9dd2dfb7 service nova] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Updating instance_info_cache with network_info: [{"id": "d2e80ecc-8309-4e64-b962-762c8535bf0a", "address": "fa:16:3e:3c:58:f1", "network": {"id": "12566b3a-74ae-4644-915d-0710cc41de61", "bridge": "br-int", "label": "tempest-ServersTestJSON-1681247215-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3c2d8da8e40749adbe05070135cea8e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4225eb1f-0af4-4ed4-8e3d-de822eb6d4ea", "external-id": "nsx-vlan-transportzone-40", "segmentation_id": 40, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2e80ecc-83", "ovs_interfaceid": "d2e80ecc-8309-4e64-b962-762c8535bf0a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1433.176822] env[63379]: DEBUG nova.network.neutron [-] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1433.202407] env[63379]: DEBUG oslo_concurrency.lockutils [None req-93b79982-385d-4ae8-bd60-0c3d501e4c59 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1433.273870] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779134, 'name': CreateVM_Task, 'duration_secs': 0.657777} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1433.274224] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1433.274896] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1433.275640] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1433.276091] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1433.276984] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0334ff0-4483-44fe-8238-6e7a5d1368b1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.281876] env[63379]: DEBUG oslo_vmware.api [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Waiting for the task: (returnval){ [ 1433.281876] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f1622f-a6cf-456f-ce2f-db3377422e1b" [ 1433.281876] env[63379]: _type = "Task" [ 1433.281876] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1433.292401] env[63379]: DEBUG oslo_vmware.api [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f1622f-a6cf-456f-ce2f-db3377422e1b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.327361] env[63379]: DEBUG oslo_concurrency.lockutils [None req-31545581-8efe-4c81-8d8b-de07ffdca242 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Lock "41952d7b-ce23-4e9b-8843-bbac1d3099c1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.076s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1433.331584] env[63379]: DEBUG nova.compute.manager [req-3b7cce84-ded3-4a3b-9319-82a14497bb22 req-4a501c57-b164-4ce7-9083-6daa0c69a52e service nova] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Received event network-vif-deleted-c6139085-d9e8-416c-8a48-9c9e3c07eed1 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1433.471395] env[63379]: DEBUG nova.scheduler.client.report [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1433.608031] env[63379]: DEBUG oslo_vmware.api [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Task: {'id': task-1779136, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062945} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1433.608355] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1433.609220] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-614f81a1-5c85-4b26-ac8e-3ff4f7a7e26c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.637392] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Reconfiguring VM instance instance-00000017 to attach disk [datastore1] 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9/8a7a3a54-ca4f-4860-a976-7d6b1212b9c9.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1433.638063] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cff661a8-344c-48d6-bc9c-681685d8581d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.666355] env[63379]: DEBUG oslo_concurrency.lockutils [req-2904de3d-59bd-4fc2-88f2-d12d21b565f8 req-d64086f2-a469-4fea-805f-b97f9dd2dfb7 service nova] Releasing lock "refresh_cache-941ac23c-6aa9-4ed1-840a-326423b7cbc0" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1433.666777] env[63379]: DEBUG oslo_vmware.api [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779137, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.668657] env[63379]: DEBUG oslo_vmware.api [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Waiting for the task: (returnval){ [ 1433.668657] env[63379]: value = "task-1779138" [ 1433.668657] env[63379]: _type = "Task" [ 1433.668657] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1433.677689] env[63379]: DEBUG oslo_vmware.api [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Task: {'id': task-1779138, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.682111] env[63379]: INFO nova.compute.manager [-] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Took 1.82 seconds to deallocate network for instance. 
[ 1433.800033] env[63379]: DEBUG oslo_vmware.api [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f1622f-a6cf-456f-ce2f-db3377422e1b, 'name': SearchDatastore_Task, 'duration_secs': 0.010286} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1433.800033] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1433.800753] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1433.801038] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1433.801529] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1433.801529] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1433.802495] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0b6b2316-08cc-4754-ac87-bfe730bd4cac {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.814202] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1433.814516] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1433.815392] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-80653c3a-60b3-4c9b-a25e-568080625b49 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.824030] env[63379]: DEBUG oslo_vmware.api [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Waiting for the task: (returnval){ [ 1433.824030] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5264c918-35af-4ca0-681a-c43a0883c789" [ 1433.824030] env[63379]: _type = "Task" [ 1433.824030] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1433.835393] env[63379]: DEBUG nova.compute.manager [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1433.839178] env[63379]: DEBUG oslo_vmware.api [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5264c918-35af-4ca0-681a-c43a0883c789, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.984351] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.675s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1433.985036] env[63379]: DEBUG nova.compute.manager [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1433.989137] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 20.440s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1433.989893] env[63379]: DEBUG nova.objects.instance [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63379) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1434.164753] env[63379]: DEBUG oslo_vmware.api [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779137, 'name': PowerOnVM_Task, 'duration_secs': 0.608378} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1434.165058] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1434.165271] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-78ba3926-926c-40ae-b10e-8895114014db tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Updating instance 'aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae' progress to 100 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1434.199079] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b6d271ca-ef28-45e1-850d-73097d3eaf57 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1434.199400] env[63379]: DEBUG oslo_vmware.api [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Task: {'id': task-1779138, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.338297] env[63379]: DEBUG oslo_vmware.api [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5264c918-35af-4ca0-681a-c43a0883c789, 'name': SearchDatastore_Task, 'duration_secs': 0.012598} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1434.338990] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-451d3e83-3971-4549-8ce4-0faed6266182 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.351241] env[63379]: DEBUG oslo_vmware.api [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Waiting for the task: (returnval){ [ 1434.351241] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52006301-d1da-5c5d-b991-4eb018df9451" [ 1434.351241] env[63379]: _type = "Task" [ 1434.351241] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1434.359573] env[63379]: DEBUG oslo_vmware.api [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52006301-d1da-5c5d-b991-4eb018df9451, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.375916] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1434.491136] env[63379]: DEBUG nova.compute.utils [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1434.492777] env[63379]: DEBUG nova.compute.manager [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1434.492963] env[63379]: DEBUG nova.network.neutron [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1434.567071] env[63379]: DEBUG nova.policy [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd7b40e68ffc04424aee3037219bf82d6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '236e837e88c249e394ee55519b66a6a1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1434.683389] env[63379]: DEBUG oslo_vmware.api [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Task: {'id': task-1779138, 'name': ReconfigVM_Task, 'duration_secs': 0.77898} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1434.686140] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Reconfigured VM instance instance-00000017 to attach disk [datastore1] 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9/8a7a3a54-ca4f-4860-a976-7d6b1212b9c9.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1434.686140] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a2a57684-fa06-4620-93ea-bf5b0234dd20 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.694014] env[63379]: DEBUG oslo_vmware.api [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Waiting for the task: (returnval){ [ 1434.694014] env[63379]: value = "task-1779139" [ 1434.694014] env[63379]: _type = "Task" [ 1434.694014] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1434.702135] env[63379]: DEBUG oslo_vmware.api [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Task: {'id': task-1779139, 'name': Rename_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.869771] env[63379]: DEBUG oslo_vmware.api [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52006301-d1da-5c5d-b991-4eb018df9451, 'name': SearchDatastore_Task, 'duration_secs': 0.011343} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1434.870162] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1434.870377] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 941ac23c-6aa9-4ed1-840a-326423b7cbc0/941ac23c-6aa9-4ed1-840a-326423b7cbc0.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1434.870677] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-72c742c6-f503-4ea9-9105-a63f4b6eacf4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.884166] env[63379]: DEBUG oslo_vmware.api [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Waiting for the task: (returnval){ [ 1434.884166] env[63379]: value = "task-1779140" [ 1434.884166] env[63379]: _type = "Task" [ 1434.884166] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1434.897538] env[63379]: DEBUG oslo_vmware.api [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Task: {'id': task-1779140, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.999252] env[63379]: DEBUG nova.compute.manager [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Start building block device mappings for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1435.006457] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6b772e7a-1250-422d-80bd-6dd8db0d04ed tempest-ServersAdmin275Test-1628559354 tempest-ServersAdmin275Test-1628559354-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.017s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1435.007946] env[63379]: DEBUG oslo_concurrency.lockutils [None req-95583623-9321-4522-be0e-578151886a4d tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.653s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1435.008137] env[63379]: DEBUG nova.objects.instance [None req-95583623-9321-4522-be0e-578151886a4d tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Lazy-loading 'resources' on Instance uuid ae565930-1bbc-4e75-bfc1-25dbcfd2e999 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1435.050187] env[63379]: DEBUG nova.network.neutron [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Successfully created port: 3c1937ec-1f32-4f60-909d-3726888392ea {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1435.209846] env[63379]: DEBUG oslo_vmware.api [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Task: {'id': task-1779139, 'name': Rename_Task, 'duration_secs': 0.188476} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1435.213646] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1435.213805] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8361d94a-37b3-4461-95d8-13bbd87f7799 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.225061] env[63379]: DEBUG oslo_vmware.api [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Waiting for the task: (returnval){ [ 1435.225061] env[63379]: value = "task-1779142" [ 1435.225061] env[63379]: _type = "Task" [ 1435.225061] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.236650] env[63379]: DEBUG oslo_vmware.api [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Task: {'id': task-1779142, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.398546] env[63379]: DEBUG oslo_vmware.api [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Task: {'id': task-1779140, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.50659} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1435.400023] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 941ac23c-6aa9-4ed1-840a-326423b7cbc0/941ac23c-6aa9-4ed1-840a-326423b7cbc0.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1435.400023] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1435.400023] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b245ca15-9613-440b-a91f-01345760edcf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.408045] env[63379]: DEBUG oslo_vmware.api [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Waiting for the task: (returnval){ [ 1435.408045] env[63379]: value = "task-1779143" [ 1435.408045] env[63379]: _type = "Task" [ 1435.408045] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.422034] env[63379]: DEBUG oslo_vmware.api [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Task: {'id': task-1779143, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.739475] env[63379]: DEBUG oslo_vmware.api [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Task: {'id': task-1779142, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.763991] env[63379]: DEBUG oslo_concurrency.lockutils [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Acquiring lock "07cc8cd7-8368-41dd-ae13-01c8275cac9e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1435.764367] env[63379]: DEBUG oslo_concurrency.lockutils [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Lock "07cc8cd7-8368-41dd-ae13-01c8275cac9e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1435.844584] env[63379]: DEBUG nova.compute.manager [req-8eba3d98-32d3-4428-b3b6-2d78a5439bfa req-317a4dcc-0408-49a6-9cab-5a2c8172aea8 service nova] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Received event network-changed-89d7e5cf-c802-47c1-97bd-981796ed50c7 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1435.844584] env[63379]: DEBUG nova.compute.manager [req-8eba3d98-32d3-4428-b3b6-2d78a5439bfa req-317a4dcc-0408-49a6-9cab-5a2c8172aea8 service nova] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Refreshing instance network info cache due to event network-changed-89d7e5cf-c802-47c1-97bd-981796ed50c7. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1435.844880] env[63379]: DEBUG oslo_concurrency.lockutils [req-8eba3d98-32d3-4428-b3b6-2d78a5439bfa req-317a4dcc-0408-49a6-9cab-5a2c8172aea8 service nova] Acquiring lock "refresh_cache-41952d7b-ce23-4e9b-8843-bbac1d3099c1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1435.844880] env[63379]: DEBUG oslo_concurrency.lockutils [req-8eba3d98-32d3-4428-b3b6-2d78a5439bfa req-317a4dcc-0408-49a6-9cab-5a2c8172aea8 service nova] Acquired lock "refresh_cache-41952d7b-ce23-4e9b-8843-bbac1d3099c1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1435.847031] env[63379]: DEBUG nova.network.neutron [req-8eba3d98-32d3-4428-b3b6-2d78a5439bfa req-317a4dcc-0408-49a6-9cab-5a2c8172aea8 service nova] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Refreshing network info cache for port 89d7e5cf-c802-47c1-97bd-981796ed50c7 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1435.927205] env[63379]: DEBUG oslo_vmware.api [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Task: {'id': task-1779143, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083281} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1435.927572] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1435.928476] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13092997-2378-4280-95fb-0e311d45a2fc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.957413] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Reconfiguring VM instance instance-00000018 to attach disk [datastore1] 941ac23c-6aa9-4ed1-840a-326423b7cbc0/941ac23c-6aa9-4ed1-840a-326423b7cbc0.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1435.960344] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3f27d4cf-531b-4a52-bd6f-cd0ec9f3b800 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.983175] env[63379]: DEBUG oslo_vmware.api [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Waiting for the task: (returnval){ [ 1435.983175] env[63379]: value = "task-1779144" [ 1435.983175] env[63379]: _type = "Task" [ 1435.983175] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.995805] env[63379]: DEBUG oslo_vmware.api [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Task: {'id': task-1779144, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.018123] env[63379]: DEBUG nova.compute.manager [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1436.055518] env[63379]: DEBUG nova.virt.hardware [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1436.056041] env[63379]: DEBUG nova.virt.hardware [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1436.056041] env[63379]: DEBUG nova.virt.hardware [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1436.056249] env[63379]: DEBUG nova.virt.hardware [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1436.056485] env[63379]: DEBUG nova.virt.hardware [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1436.056702] env[63379]: DEBUG nova.virt.hardware [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1436.057428] env[63379]: DEBUG nova.virt.hardware [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1436.057428] env[63379]: DEBUG nova.virt.hardware [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1436.057662] env[63379]: DEBUG 
nova.virt.hardware [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1436.057662] env[63379]: DEBUG nova.virt.hardware [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1436.057996] env[63379]: DEBUG nova.virt.hardware [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1436.058937] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e6fdcc0-ffa7-4063-94df-ee5f0b0b5fa5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.073109] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63b4295f-326f-4090-8840-c8fdc8d4247b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.095924] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7762ecd6-4d08-40af-ac47-15453f7d26e4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.103780] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-376e6801-1824-410c-b8a8-c2f2da231f3b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.135153] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c3c949f-2f5f-4222-adaa-8068a9b4f47b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.143790] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c36a8e9b-139a-4319-be64-1ab595259e04 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.160601] env[63379]: DEBUG nova.compute.provider_tree [None req-95583623-9321-4522-be0e-578151886a4d tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1436.240466] env[63379]: DEBUG oslo_vmware.api [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Task: {'id': task-1779142, 'name': PowerOnVM_Task, 'duration_secs': 0.704572} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.240770] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1436.240959] env[63379]: INFO nova.compute.manager [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Took 10.14 seconds to spawn the instance on the hypervisor. [ 1436.241188] env[63379]: DEBUG nova.compute.manager [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1436.242047] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca440776-04cd-43d6-a7c0-33029272c132 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.493833] env[63379]: DEBUG oslo_vmware.api [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Task: {'id': task-1779144, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.663519] env[63379]: DEBUG nova.scheduler.client.report [None req-95583623-9321-4522-be0e-578151886a4d tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1436.685931] env[63379]: DEBUG nova.network.neutron [req-8eba3d98-32d3-4428-b3b6-2d78a5439bfa req-317a4dcc-0408-49a6-9cab-5a2c8172aea8 service nova] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Updated VIF entry in instance network info cache for port 89d7e5cf-c802-47c1-97bd-981796ed50c7. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1436.687151] env[63379]: DEBUG nova.network.neutron [req-8eba3d98-32d3-4428-b3b6-2d78a5439bfa req-317a4dcc-0408-49a6-9cab-5a2c8172aea8 service nova] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Updating instance_info_cache with network_info: [{"id": "89d7e5cf-c802-47c1-97bd-981796ed50c7", "address": "fa:16:3e:01:a2:40", "network": {"id": "42ec9777-27c5-4516-be87-12d549df72cd", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1493935153-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.214", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86e655baa29c4c88b8648d273f92ed4b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89d7e5cf-c8", "ovs_interfaceid": "89d7e5cf-c802-47c1-97bd-981796ed50c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1436.766751] env[63379]: INFO nova.compute.manager [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Took 33.23 seconds to build instance. [ 1436.823618] env[63379]: DEBUG nova.network.neutron [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Successfully updated port: 3c1937ec-1f32-4f60-909d-3726888392ea {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1436.996742] env[63379]: DEBUG oslo_vmware.api [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Task: {'id': task-1779144, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.160729] env[63379]: DEBUG nova.compute.manager [req-9ba8005e-2a0e-428a-9590-c185b7719580 req-584a8032-0b6f-41fa-8543-f0340c499459 service nova] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Received event network-vif-plugged-3c1937ec-1f32-4f60-909d-3726888392ea {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1437.160976] env[63379]: DEBUG oslo_concurrency.lockutils [req-9ba8005e-2a0e-428a-9590-c185b7719580 req-584a8032-0b6f-41fa-8543-f0340c499459 service nova] Acquiring lock "76731b1b-af66-441b-8fe4-d5d7e7faf3ca-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1437.161229] env[63379]: DEBUG oslo_concurrency.lockutils [req-9ba8005e-2a0e-428a-9590-c185b7719580 req-584a8032-0b6f-41fa-8543-f0340c499459 service nova] Lock "76731b1b-af66-441b-8fe4-d5d7e7faf3ca-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1437.161443] env[63379]: DEBUG oslo_concurrency.lockutils [req-9ba8005e-2a0e-428a-9590-c185b7719580 req-584a8032-0b6f-41fa-8543-f0340c499459 service nova] Lock "76731b1b-af66-441b-8fe4-d5d7e7faf3ca-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1437.161635] env[63379]: DEBUG nova.compute.manager [req-9ba8005e-2a0e-428a-9590-c185b7719580 req-584a8032-0b6f-41fa-8543-f0340c499459 service nova] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] No waiting events found dispatching network-vif-plugged-3c1937ec-1f32-4f60-909d-3726888392ea {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1437.161841] env[63379]: WARNING nova.compute.manager [req-9ba8005e-2a0e-428a-9590-c185b7719580 req-584a8032-0b6f-41fa-8543-f0340c499459 service nova] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Received unexpected event network-vif-plugged-3c1937ec-1f32-4f60-909d-3726888392ea for instance with vm_state building and task_state spawning. 
[ 1437.179194] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3175b1b6-4e9a-42e5-959a-d95942b5064f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Acquiring lock "aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1437.179461] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3175b1b6-4e9a-42e5-959a-d95942b5064f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Lock "aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1437.180997] env[63379]: DEBUG nova.compute.manager [None req-3175b1b6-4e9a-42e5-959a-d95942b5064f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Going to confirm migration 1 {{(pid=63379) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 1437.182140] env[63379]: DEBUG oslo_concurrency.lockutils [None req-95583623-9321-4522-be0e-578151886a4d tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.174s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1437.184643] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.737s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1437.189492] env[63379]: INFO nova.compute.claims [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1437.190101] env[63379]: DEBUG oslo_concurrency.lockutils [req-8eba3d98-32d3-4428-b3b6-2d78a5439bfa req-317a4dcc-0408-49a6-9cab-5a2c8172aea8 service nova] Releasing lock "refresh_cache-41952d7b-ce23-4e9b-8843-bbac1d3099c1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1437.221045] env[63379]: INFO nova.scheduler.client.report [None req-95583623-9321-4522-be0e-578151886a4d tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Deleted allocations for instance ae565930-1bbc-4e75-bfc1-25dbcfd2e999 [ 1437.239086] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Acquiring lock "a78feafb-00bc-44c4-acd3-a36fb8a81767" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1437.239351] env[63379]: DEBUG 
oslo_concurrency.lockutils [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Lock "a78feafb-00bc-44c4-acd3-a36fb8a81767" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1437.268700] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e3e39eb9-2f2c-478f-a300-7f3ac5325bf7 tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Lock "8a7a3a54-ca4f-4860-a976-7d6b1212b9c9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.996s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1437.327094] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Acquiring lock "refresh_cache-76731b1b-af66-441b-8fe4-d5d7e7faf3ca" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1437.327256] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Acquired lock "refresh_cache-76731b1b-af66-441b-8fe4-d5d7e7faf3ca" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1437.327409] env[63379]: DEBUG nova.network.neutron [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1437.497038] env[63379]: DEBUG oslo_vmware.api [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Task: {'id': task-1779144, 'name': ReconfigVM_Task, 'duration_secs': 1.333667} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1437.498103] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Reconfigured VM instance instance-00000018 to attach disk [datastore1] 941ac23c-6aa9-4ed1-840a-326423b7cbc0/941ac23c-6aa9-4ed1-840a-326423b7cbc0.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1437.499534] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5fb54d5e-0f6d-4b36-aa06-bb1bc9bef6c4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.509846] env[63379]: DEBUG oslo_vmware.api [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Waiting for the task: (returnval){ [ 1437.509846] env[63379]: value = "task-1779146" [ 1437.509846] env[63379]: _type = "Task" [ 1437.509846] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1437.519222] env[63379]: DEBUG oslo_vmware.api [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Task: {'id': task-1779146, 'name': Rename_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.738987] env[63379]: DEBUG oslo_concurrency.lockutils [None req-95583623-9321-4522-be0e-578151886a4d tempest-ServerDiagnosticsV248Test-1842106190 tempest-ServerDiagnosticsV248Test-1842106190-project-member] Lock "ae565930-1bbc-4e75-bfc1-25dbcfd2e999" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.294s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1437.750515] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cea83ea2-636e-4351-be4b-cdb72e4f2309 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.761012] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-d2eb75cb-22e5-4350-8add-7d441f74bc93 tempest-ServersAdminNegativeTestJSON-824584002 tempest-ServersAdminNegativeTestJSON-824584002-project-admin] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Suspending the VM {{(pid=63379) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1437.761356] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-230751a3-ff87-4411-9b34-fb1fb69abb1b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.769970] env[63379]: DEBUG oslo_vmware.api [None req-d2eb75cb-22e5-4350-8add-7d441f74bc93 tempest-ServersAdminNegativeTestJSON-824584002 tempest-ServersAdminNegativeTestJSON-824584002-project-admin] Waiting for the task: (returnval){ [ 1437.769970] env[63379]: value = "task-1779147" [ 1437.769970] env[63379]: _type = "Task" [ 1437.769970] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1437.775555] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3175b1b6-4e9a-42e5-959a-d95942b5064f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Acquiring lock "refresh_cache-aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1437.775857] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3175b1b6-4e9a-42e5-959a-d95942b5064f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Acquired lock "refresh_cache-aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1437.776179] env[63379]: DEBUG nova.network.neutron [None req-3175b1b6-4e9a-42e5-959a-d95942b5064f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1437.777976] env[63379]: DEBUG nova.objects.instance [None req-3175b1b6-4e9a-42e5-959a-d95942b5064f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Lazy-loading 'info_cache' on Instance uuid aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1437.778849] env[63379]: DEBUG nova.compute.manager [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1437.788038] env[63379]: DEBUG oslo_vmware.api [None req-d2eb75cb-22e5-4350-8add-7d441f74bc93 tempest-ServersAdminNegativeTestJSON-824584002 tempest-ServersAdminNegativeTestJSON-824584002-project-admin] Task: {'id': task-1779147, 'name': SuspendVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.892727] env[63379]: DEBUG nova.network.neutron [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1438.027251] env[63379]: DEBUG oslo_vmware.api [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Task: {'id': task-1779146, 'name': Rename_Task, 'duration_secs': 0.192704} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1438.027251] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1438.027584] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d33580d9-558b-42c4-baf0-cca650f94a35 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.036613] env[63379]: DEBUG oslo_vmware.api [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Waiting for the task: (returnval){ [ 1438.036613] env[63379]: value = "task-1779148" [ 1438.036613] env[63379]: _type = "Task" [ 1438.036613] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1438.046402] env[63379]: DEBUG oslo_vmware.api [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Task: {'id': task-1779148, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.116243] env[63379]: DEBUG nova.network.neutron [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Updating instance_info_cache with network_info: [{"id": "3c1937ec-1f32-4f60-909d-3726888392ea", "address": "fa:16:3e:6d:6e:47", "network": {"id": "edc5adbd-e1e2-431b-abaf-063070bd8536", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-299641573-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "236e837e88c249e394ee55519b66a6a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "399f3826-705c-45f7-9fe0-3a08a945151a", "external-id": "nsx-vlan-transportzone-936", "segmentation_id": 936, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c1937ec-1f", "ovs_interfaceid": "3c1937ec-1f32-4f60-909d-3726888392ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1438.281679] env[63379]: DEBUG oslo_vmware.api [None req-d2eb75cb-22e5-4350-8add-7d441f74bc93 tempest-ServersAdminNegativeTestJSON-824584002 tempest-ServersAdminNegativeTestJSON-824584002-project-admin] Task: {'id': task-1779147, 'name': SuspendVM_Task} progress is 62%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.307984] env[63379]: DEBUG oslo_concurrency.lockutils [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1438.552039] env[63379]: DEBUG oslo_vmware.api [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Task: {'id': task-1779148, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.619426] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Releasing lock "refresh_cache-76731b1b-af66-441b-8fe4-d5d7e7faf3ca" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1438.620527] env[63379]: DEBUG nova.compute.manager [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Instance network_info: |[{"id": "3c1937ec-1f32-4f60-909d-3726888392ea", "address": "fa:16:3e:6d:6e:47", "network": {"id": "edc5adbd-e1e2-431b-abaf-063070bd8536", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-299641573-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "236e837e88c249e394ee55519b66a6a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "399f3826-705c-45f7-9fe0-3a08a945151a", "external-id": "nsx-vlan-transportzone-936", "segmentation_id": 936, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c1937ec-1f", "ovs_interfaceid": "3c1937ec-1f32-4f60-909d-3726888392ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1438.620527] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6d:6e:47', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '399f3826-705c-45f7-9fe0-3a08a945151a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3c1937ec-1f32-4f60-909d-3726888392ea', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1438.628755] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-b990496d-0e98-4db8-83cb-c96822c6b40c 
tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Creating folder: Project (236e837e88c249e394ee55519b66a6a1). Parent ref: group-v369214. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1438.631750] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ccefcf63-1a34-4fe1-8aa6-c8defeb9529b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.647683] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Created folder: Project (236e837e88c249e394ee55519b66a6a1) in parent group-v369214. [ 1438.647683] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Creating folder: Instances. Parent ref: group-v369290. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1438.647819] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-da49f9ba-a893-4459-a3f7-2e251bc898e3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.664379] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Created folder: Instances in parent group-v369290. [ 1438.664827] env[63379]: DEBUG oslo.service.loopingcall [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1438.665146] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1438.665448] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9130a310-aadc-44e7-acba-027fded63dc4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.683115] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cfbf4b3-8e79-4f3f-9e0b-30f0558a9e07 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.691708] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc651e20-80df-46e1-924d-78f30f7155f1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.697648] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1438.697648] env[63379]: value = "task-1779151" [ 1438.697648] env[63379]: _type = "Task" [ 1438.697648] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1438.732098] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a07d4a67-baf4-423b-8e56-8b11aee03eab {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.739328] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779151, 'name': CreateVM_Task} progress is 15%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.745881] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7518be35-c385-4b6d-913b-d58dd0f9c278 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.765819] env[63379]: DEBUG nova.compute.provider_tree [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1438.782851] env[63379]: DEBUG oslo_vmware.api [None req-d2eb75cb-22e5-4350-8add-7d441f74bc93 tempest-ServersAdminNegativeTestJSON-824584002 tempest-ServersAdminNegativeTestJSON-824584002-project-admin] Task: {'id': task-1779147, 'name': SuspendVM_Task} progress is 100%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.053686] env[63379]: DEBUG oslo_vmware.api [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Task: {'id': task-1779148, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.211015] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779151, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.220050] env[63379]: DEBUG nova.network.neutron [None req-3175b1b6-4e9a-42e5-959a-d95942b5064f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Updating instance_info_cache with network_info: [{"id": "e034314c-72fb-4187-9c6b-1cd2e95aa97a", "address": "fa:16:3e:d2:92:4e", "network": {"id": "55f3848c-4d4f-4c83-a3e6-bc7a6f7af3ce", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.250", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eb95d75934bc4912a35f709406a98a65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape034314c-72", "ovs_interfaceid": "e034314c-72fb-4187-9c6b-1cd2e95aa97a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1439.281522] env[63379]: DEBUG oslo_vmware.api [None req-d2eb75cb-22e5-4350-8add-7d441f74bc93 tempest-ServersAdminNegativeTestJSON-824584002 tempest-ServersAdminNegativeTestJSON-824584002-project-admin] Task: {'id': task-1779147, 'name': SuspendVM_Task, 'duration_secs': 1.058529} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.281522] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-d2eb75cb-22e5-4350-8add-7d441f74bc93 tempest-ServersAdminNegativeTestJSON-824584002 tempest-ServersAdminNegativeTestJSON-824584002-project-admin] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Suspended the VM {{(pid=63379) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1439.281522] env[63379]: DEBUG nova.compute.manager [None req-d2eb75cb-22e5-4350-8add-7d441f74bc93 tempest-ServersAdminNegativeTestJSON-824584002 tempest-ServersAdminNegativeTestJSON-824584002-project-admin] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1439.282000] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5e89c56-e314-4bb9-babd-65379b6aa679 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.292057] env[63379]: ERROR nova.scheduler.client.report [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] [req-be977515-fde4-475f-a3c1-9966232d7d38] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID cf478c89-515f-4372-b90f-4868ab56e978. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-be977515-fde4-475f-a3c1-9966232d7d38"}]} [ 1439.315423] env[63379]: DEBUG nova.scheduler.client.report [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Refreshing inventories for resource provider cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1439.332286] env[63379]: DEBUG nova.scheduler.client.report [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Updating ProviderTree inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1439.332586] env[63379]: DEBUG nova.compute.provider_tree [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1439.346037] env[63379]: DEBUG nova.scheduler.client.report [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Refreshing aggregate associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, aggregates: None {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1439.375216] env[63379]: DEBUG nova.scheduler.client.report [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Refreshing trait associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1439.433402] env[63379]: DEBUG nova.compute.manager [req-744e691a-3cb5-40b1-90b8-8c2b6abdf000 req-8e1932f9-57bb-4c2b-82c9-eda6bbe9c475 service nova] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Received event network-changed-3c1937ec-1f32-4f60-909d-3726888392ea {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1439.433590] env[63379]: DEBUG nova.compute.manager 
[req-744e691a-3cb5-40b1-90b8-8c2b6abdf000 req-8e1932f9-57bb-4c2b-82c9-eda6bbe9c475 service nova] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Refreshing instance network info cache due to event network-changed-3c1937ec-1f32-4f60-909d-3726888392ea. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1439.434027] env[63379]: DEBUG oslo_concurrency.lockutils [req-744e691a-3cb5-40b1-90b8-8c2b6abdf000 req-8e1932f9-57bb-4c2b-82c9-eda6bbe9c475 service nova] Acquiring lock "refresh_cache-76731b1b-af66-441b-8fe4-d5d7e7faf3ca" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1439.434188] env[63379]: DEBUG oslo_concurrency.lockutils [req-744e691a-3cb5-40b1-90b8-8c2b6abdf000 req-8e1932f9-57bb-4c2b-82c9-eda6bbe9c475 service nova] Acquired lock "refresh_cache-76731b1b-af66-441b-8fe4-d5d7e7faf3ca" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1439.434405] env[63379]: DEBUG nova.network.neutron [req-744e691a-3cb5-40b1-90b8-8c2b6abdf000 req-8e1932f9-57bb-4c2b-82c9-eda6bbe9c475 service nova] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Refreshing network info cache for port 3c1937ec-1f32-4f60-909d-3726888392ea {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1439.552281] env[63379]: DEBUG oslo_vmware.api [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Task: {'id': task-1779148, 'name': PowerOnVM_Task, 'duration_secs': 1.124675} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.552590] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1439.552797] env[63379]: INFO nova.compute.manager [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Took 10.67 seconds to spawn the instance on the hypervisor. [ 1439.552978] env[63379]: DEBUG nova.compute.manager [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1439.553778] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ac7ba94-ca80-46fc-846e-d41c44748568 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.710970] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779151, 'name': CreateVM_Task, 'duration_secs': 0.772712} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.713568] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1439.714472] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1439.714660] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1439.714975] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1439.715386] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3700eca1-b15b-4667-a45f-16f1ee38b8c2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.720882] env[63379]: DEBUG oslo_vmware.api [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Waiting for the task: (returnval){ [ 1439.720882] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]524fd2b9-8417-97d9-8d0d-f011abd14442" [ 1439.720882] env[63379]: _type = "Task" [ 1439.720882] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.724708] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3175b1b6-4e9a-42e5-959a-d95942b5064f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Releasing lock "refresh_cache-aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1439.725279] env[63379]: DEBUG nova.objects.instance [None req-3175b1b6-4e9a-42e5-959a-d95942b5064f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Lazy-loading 'migration_context' on Instance uuid aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1439.737295] env[63379]: DEBUG oslo_vmware.api [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]524fd2b9-8417-97d9-8d0d-f011abd14442, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.823077] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d5d43ec-4ab7-47d2-9929-8661eccf2e09 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.831987] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3adf5029-b7d5-4d85-bc7e-83582acb4c9d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.865456] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0883f5bb-568b-4932-923b-5c585416d182 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.873943] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9274580f-aba5-498b-a39e-4f941f850a69 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.889105] env[63379]: DEBUG nova.compute.provider_tree [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1440.079937] env[63379]: INFO nova.compute.manager [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Took 34.89 seconds to build instance. [ 1440.228280] env[63379]: DEBUG nova.objects.base [None req-3175b1b6-4e9a-42e5-959a-d95942b5064f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=63379) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1440.232823] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b15fe782-153f-4ff2-9a24-3566279f506d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.235755] env[63379]: DEBUG oslo_vmware.api [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]524fd2b9-8417-97d9-8d0d-f011abd14442, 'name': SearchDatastore_Task, 'duration_secs': 0.033225} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.236090] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1440.236341] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1440.236582] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1440.236731] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1440.236913] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1440.237573] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7c039996-b5f6-425d-a29d-f028d07f3dc9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.255488] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a8ff512-6bc9-41fc-a71d-a9d8fcc6649b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.257845] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1440.258041] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1440.258721] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21c81e13-0a10-4c41-988c-a3821f9e4bc1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.265057] env[63379]: DEBUG oslo_vmware.api [None req-3175b1b6-4e9a-42e5-959a-d95942b5064f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Waiting for the task: (returnval){ [ 1440.265057] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]522549ca-01d0-3c42-7f4e-b5d15ba5005b" [ 1440.265057] env[63379]: _type = "Task" [ 1440.265057] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.267127] env[63379]: DEBUG oslo_vmware.api [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Waiting for the task: (returnval){ [ 1440.267127] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5213be0b-732c-c526-cdc8-274580c41ead" [ 1440.267127] env[63379]: _type = "Task" [ 1440.267127] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.276736] env[63379]: DEBUG oslo_vmware.api [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5213be0b-732c-c526-cdc8-274580c41ead, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.279628] env[63379]: DEBUG oslo_vmware.api [None req-3175b1b6-4e9a-42e5-959a-d95942b5064f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]522549ca-01d0-3c42-7f4e-b5d15ba5005b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.418906] env[63379]: ERROR nova.scheduler.client.report [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] [req-a0887011-9b66-465d-bc9a-a43a760f0392] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID cf478c89-515f-4372-b90f-4868ab56e978. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-a0887011-9b66-465d-bc9a-a43a760f0392"}]} [ 1440.439766] env[63379]: DEBUG nova.scheduler.client.report [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Refreshing inventories for resource provider cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1440.459201] env[63379]: DEBUG nova.scheduler.client.report [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Updating ProviderTree inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1440.459510] env[63379]: DEBUG nova.compute.provider_tree [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1440.478135] env[63379]: DEBUG nova.scheduler.client.report [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Refreshing aggregate associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, aggregates: None {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1440.505796] env[63379]: DEBUG nova.scheduler.client.report [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Refreshing trait associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1440.551596] env[63379]: DEBUG oslo_concurrency.lockutils [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Acquiring lock "ee36cc5f-61a1-4e4f-9cae-670f5868d90c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1440.551841] env[63379]: DEBUG oslo_concurrency.lockutils [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Lock "ee36cc5f-61a1-4e4f-9cae-670f5868d90c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1440.552053] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Acquiring lock "a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1440.552251] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Lock "a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1440.584702] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9da71dbf-bef0-412c-8812-770ac7e65934 tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Lock "941ac23c-6aa9-4ed1-840a-326423b7cbc0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.292s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1440.671425] env[63379]: DEBUG nova.network.neutron [req-744e691a-3cb5-40b1-90b8-8c2b6abdf000 req-8e1932f9-57bb-4c2b-82c9-eda6bbe9c475 service nova] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Updated VIF entry in instance network info cache for port 3c1937ec-1f32-4f60-909d-3726888392ea. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1440.674285] env[63379]: DEBUG nova.network.neutron [req-744e691a-3cb5-40b1-90b8-8c2b6abdf000 req-8e1932f9-57bb-4c2b-82c9-eda6bbe9c475 service nova] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Updating instance_info_cache with network_info: [{"id": "3c1937ec-1f32-4f60-909d-3726888392ea", "address": "fa:16:3e:6d:6e:47", "network": {"id": "edc5adbd-e1e2-431b-abaf-063070bd8536", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-299641573-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "236e837e88c249e394ee55519b66a6a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "399f3826-705c-45f7-9fe0-3a08a945151a", "external-id": "nsx-vlan-transportzone-936", "segmentation_id": 936, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c1937ec-1f", "ovs_interfaceid": "3c1937ec-1f32-4f60-909d-3726888392ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1440.781563] env[63379]: DEBUG oslo_vmware.api [None req-3175b1b6-4e9a-42e5-959a-d95942b5064f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]522549ca-01d0-3c42-7f4e-b5d15ba5005b, 'name': SearchDatastore_Task, 'duration_secs': 0.011858} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.786011] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3175b1b6-4e9a-42e5-959a-d95942b5064f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1440.786416] env[63379]: DEBUG oslo_vmware.api [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5213be0b-732c-c526-cdc8-274580c41ead, 'name': SearchDatastore_Task, 'duration_secs': 0.039956} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.787316] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7404d9a3-fbfd-4b33-a428-0bd3166818d6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.794596] env[63379]: DEBUG oslo_vmware.api [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Waiting for the task: (returnval){ [ 1440.794596] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]526ca61b-8894-d22c-36d8-cb68b3d2d772" [ 1440.794596] env[63379]: _type = "Task" [ 1440.794596] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.809403] env[63379]: DEBUG oslo_vmware.api [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]526ca61b-8894-d22c-36d8-cb68b3d2d772, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.003814] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69e4a988-f1fb-421f-9d6c-8f5706cab605 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.012456] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4ba85e3-939b-447a-8cf1-6174bab1db20 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.046557] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34f2b5f2-85c6-416e-8f6b-d3c140d0c8a3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.057028] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3500ccb0-c5b2-4452-9d92-d35a255fde55 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.074463] env[63379]: DEBUG nova.compute.provider_tree [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1441.096016] env[63379]: DEBUG nova.compute.manager [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Starting instance... 
{{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1441.178391] env[63379]: DEBUG oslo_concurrency.lockutils [req-744e691a-3cb5-40b1-90b8-8c2b6abdf000 req-8e1932f9-57bb-4c2b-82c9-eda6bbe9c475 service nova] Releasing lock "refresh_cache-76731b1b-af66-441b-8fe4-d5d7e7faf3ca" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1441.231204] env[63379]: DEBUG oslo_concurrency.lockutils [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Acquiring lock "aa44a4ff-14e5-42d2-a082-06fe0ae9646c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1441.231484] env[63379]: DEBUG oslo_concurrency.lockutils [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Lock "aa44a4ff-14e5-42d2-a082-06fe0ae9646c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1441.309050] env[63379]: DEBUG oslo_vmware.api [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]526ca61b-8894-d22c-36d8-cb68b3d2d772, 'name': SearchDatastore_Task, 'duration_secs': 0.013598} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1441.309351] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1441.309620] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 76731b1b-af66-441b-8fe4-d5d7e7faf3ca/76731b1b-af66-441b-8fe4-d5d7e7faf3ca.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1441.310448] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d0036f55-b216-4d8e-a2d6-e5aed127f171 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.319543] env[63379]: DEBUG oslo_vmware.api [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Waiting for the task: (returnval){ [ 1441.319543] env[63379]: value = "task-1779152" [ 1441.319543] env[63379]: _type = "Task" [ 1441.319543] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.333929] env[63379]: DEBUG oslo_vmware.api [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': task-1779152, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.597328] env[63379]: ERROR nova.scheduler.client.report [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] [req-a2617ade-ab88-4de2-b0ba-c8b5b48769fc] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID cf478c89-515f-4372-b90f-4868ab56e978. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-a2617ade-ab88-4de2-b0ba-c8b5b48769fc"}]} [ 1441.621225] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1441.628699] env[63379]: DEBUG nova.scheduler.client.report [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Refreshing inventories for resource provider cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1441.652367] env[63379]: DEBUG nova.scheduler.client.report [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Updating ProviderTree inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1441.652367] env[63379]: DEBUG nova.compute.provider_tree [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 
1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1441.663114] env[63379]: DEBUG nova.scheduler.client.report [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Refreshing aggregate associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, aggregates: None {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1441.698243] env[63379]: DEBUG nova.scheduler.client.report [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Refreshing trait associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1441.836413] env[63379]: DEBUG oslo_vmware.api [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': task-1779152, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.236481] env[63379]: DEBUG nova.compute.manager [req-6a18659e-aada-412c-a32f-1c5adf6f3ee2 req-1e66f624-e080-4d01-bfd9-3d65567cee0c service nova] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Received event network-changed-d2e80ecc-8309-4e64-b962-762c8535bf0a {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1442.236736] env[63379]: DEBUG nova.compute.manager [req-6a18659e-aada-412c-a32f-1c5adf6f3ee2 req-1e66f624-e080-4d01-bfd9-3d65567cee0c service nova] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Refreshing instance network info cache due to event network-changed-d2e80ecc-8309-4e64-b962-762c8535bf0a. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1442.236903] env[63379]: DEBUG oslo_concurrency.lockutils [req-6a18659e-aada-412c-a32f-1c5adf6f3ee2 req-1e66f624-e080-4d01-bfd9-3d65567cee0c service nova] Acquiring lock "refresh_cache-941ac23c-6aa9-4ed1-840a-326423b7cbc0" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1442.237072] env[63379]: DEBUG oslo_concurrency.lockutils [req-6a18659e-aada-412c-a32f-1c5adf6f3ee2 req-1e66f624-e080-4d01-bfd9-3d65567cee0c service nova] Acquired lock "refresh_cache-941ac23c-6aa9-4ed1-840a-326423b7cbc0" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1442.237244] env[63379]: DEBUG nova.network.neutron [req-6a18659e-aada-412c-a32f-1c5adf6f3ee2 req-1e66f624-e080-4d01-bfd9-3d65567cee0c service nova] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Refreshing network info cache for port d2e80ecc-8309-4e64-b962-762c8535bf0a {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1442.240417] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3473a222-e5d1-4b1b-bc30-b8d89856a9de {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.249991] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0d3b441-a689-46a3-aab9-a4a5b400bbb2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.292793] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1572b273-3165-4e3f-8d6c-004babf83631 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.303304] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9cff086-fde9-42f8-a9d6-2d820dfa9dd9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.317636] env[63379]: DEBUG nova.compute.provider_tree [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1442.332056] env[63379]: DEBUG oslo_vmware.api [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': task-1779152, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.703016} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.332975] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 76731b1b-af66-441b-8fe4-d5d7e7faf3ca/76731b1b-af66-441b-8fe4-d5d7e7faf3ca.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1442.333222] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1442.333473] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-49651c8b-76f7-40b1-b154-d73a4fdc9659 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.343057] env[63379]: DEBUG oslo_vmware.api [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Waiting for the task: (returnval){ [ 1442.343057] env[63379]: value = "task-1779153" [ 1442.343057] env[63379]: _type = "Task" [ 1442.343057] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.357879] env[63379]: DEBUG oslo_vmware.api [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': task-1779153, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.820608] env[63379]: DEBUG nova.scheduler.client.report [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1442.854595] env[63379]: DEBUG oslo_vmware.api [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': task-1779153, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071587} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.854595] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2f5d20de-5285-451f-ab9b-c45cd36f476c tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Acquiring lock "8a7a3a54-ca4f-4860-a976-7d6b1212b9c9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1442.855473] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2f5d20de-5285-451f-ab9b-c45cd36f476c tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Lock "8a7a3a54-ca4f-4860-a976-7d6b1212b9c9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1442.855756] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2f5d20de-5285-451f-ab9b-c45cd36f476c tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Acquiring lock "8a7a3a54-ca4f-4860-a976-7d6b1212b9c9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1442.856046] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2f5d20de-5285-451f-ab9b-c45cd36f476c tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Lock "8a7a3a54-ca4f-4860-a976-7d6b1212b9c9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1442.856208] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2f5d20de-5285-451f-ab9b-c45cd36f476c tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Lock "8a7a3a54-ca4f-4860-a976-7d6b1212b9c9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1442.857840] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1442.861251] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1429fb61-6f21-4aab-8a60-ce56daccf1c6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.866903] env[63379]: INFO nova.compute.manager [None req-2f5d20de-5285-451f-ab9b-c45cd36f476c tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Terminating instance [ 1442.869020] env[63379]: DEBUG nova.compute.manager [None req-2f5d20de-5285-451f-ab9b-c45cd36f476c tempest-ServersAdminNegativeTestJSON-132676374 
tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1442.869281] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-2f5d20de-5285-451f-ab9b-c45cd36f476c tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1442.870144] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-852e51f7-bc61-45c8-9948-4a88bd2074fd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.894058] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Reconfiguring VM instance instance-00000019 to attach disk [datastore1] 76731b1b-af66-441b-8fe4-d5d7e7faf3ca/76731b1b-af66-441b-8fe4-d5d7e7faf3ca.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1442.894998] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0f079400-f492-420c-9611-95cacc997a2f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.912305] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-2f5d20de-5285-451f-ab9b-c45cd36f476c tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1442.915173] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3264b845-07bf-4db1-ac93-3fcd47a23fe8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.920097] env[63379]: DEBUG oslo_vmware.api [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Waiting for the task: (returnval){ [ 1442.920097] env[63379]: value = "task-1779154" [ 1442.920097] env[63379]: _type = "Task" [ 1442.920097] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.929297] env[63379]: DEBUG oslo_vmware.api [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': task-1779154, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.999479] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-2f5d20de-5285-451f-ab9b-c45cd36f476c tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1443.000124] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-2f5d20de-5285-451f-ab9b-c45cd36f476c tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1443.000124] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f5d20de-5285-451f-ab9b-c45cd36f476c tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Deleting the datastore file [datastore1] 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1443.000773] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-60a6241d-4ef9-4879-8ce9-0ac0cbbb6ba0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.008650] env[63379]: DEBUG oslo_vmware.api [None req-2f5d20de-5285-451f-ab9b-c45cd36f476c tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Waiting for the task: (returnval){ [ 1443.008650] env[63379]: value = "task-1779156" [ 1443.008650] env[63379]: _type = "Task" [ 1443.008650] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.019703] env[63379]: DEBUG oslo_vmware.api [None req-2f5d20de-5285-451f-ab9b-c45cd36f476c tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Task: {'id': task-1779156, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.289811] env[63379]: DEBUG nova.network.neutron [req-6a18659e-aada-412c-a32f-1c5adf6f3ee2 req-1e66f624-e080-4d01-bfd9-3d65567cee0c service nova] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Updated VIF entry in instance network info cache for port d2e80ecc-8309-4e64-b962-762c8535bf0a. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1443.290238] env[63379]: DEBUG nova.network.neutron [req-6a18659e-aada-412c-a32f-1c5adf6f3ee2 req-1e66f624-e080-4d01-bfd9-3d65567cee0c service nova] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Updating instance_info_cache with network_info: [{"id": "d2e80ecc-8309-4e64-b962-762c8535bf0a", "address": "fa:16:3e:3c:58:f1", "network": {"id": "12566b3a-74ae-4644-915d-0710cc41de61", "bridge": "br-int", "label": "tempest-ServersTestJSON-1681247215-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.137", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3c2d8da8e40749adbe05070135cea8e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4225eb1f-0af4-4ed4-8e3d-de822eb6d4ea", "external-id": "nsx-vlan-transportzone-40", "segmentation_id": 40, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2e80ecc-83", "ovs_interfaceid": "d2e80ecc-8309-4e64-b962-762c8535bf0a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1443.326740] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 6.142s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1443.327278] env[63379]: DEBUG nova.compute.manager [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1443.329911] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.105s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1443.331563] env[63379]: INFO nova.compute.claims [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1443.430895] env[63379]: DEBUG oslo_vmware.api [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': task-1779154, 'name': ReconfigVM_Task, 'duration_secs': 0.307333} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.431224] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Reconfigured VM instance instance-00000019 to attach disk [datastore1] 76731b1b-af66-441b-8fe4-d5d7e7faf3ca/76731b1b-af66-441b-8fe4-d5d7e7faf3ca.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1443.431818] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-526f4d63-716c-4407-b444-48e9c9934e8b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.440200] env[63379]: DEBUG oslo_vmware.api [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Waiting for the task: (returnval){ [ 1443.440200] env[63379]: value = "task-1779157" [ 1443.440200] env[63379]: _type = "Task" [ 1443.440200] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.448852] env[63379]: DEBUG oslo_vmware.api [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': task-1779157, 'name': Rename_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.519724] env[63379]: DEBUG oslo_vmware.api [None req-2f5d20de-5285-451f-ab9b-c45cd36f476c tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Task: {'id': task-1779156, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.272135} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.519724] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f5d20de-5285-451f-ab9b-c45cd36f476c tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1443.519927] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-2f5d20de-5285-451f-ab9b-c45cd36f476c tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1443.520152] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-2f5d20de-5285-451f-ab9b-c45cd36f476c tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1443.520349] env[63379]: INFO nova.compute.manager [None req-2f5d20de-5285-451f-ab9b-c45cd36f476c tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Took 0.65 seconds to destroy the instance on the hypervisor. 
[ 1443.520641] env[63379]: DEBUG oslo.service.loopingcall [None req-2f5d20de-5285-451f-ab9b-c45cd36f476c tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1443.520850] env[63379]: DEBUG nova.compute.manager [-] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1443.520952] env[63379]: DEBUG nova.network.neutron [-] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1443.793272] env[63379]: DEBUG oslo_concurrency.lockutils [req-6a18659e-aada-412c-a32f-1c5adf6f3ee2 req-1e66f624-e080-4d01-bfd9-3d65567cee0c service nova] Releasing lock "refresh_cache-941ac23c-6aa9-4ed1-840a-326423b7cbc0" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1443.836153] env[63379]: DEBUG nova.compute.utils [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1443.840385] env[63379]: DEBUG nova.compute.manager [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1443.840565] env[63379]: DEBUG nova.network.neutron [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1443.891028] env[63379]: DEBUG nova.policy [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '555eb640c94c49f7a474962d124cc5dc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1b1491caa48e4025a443f85088c99b4c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1443.952157] env[63379]: DEBUG oslo_vmware.api [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': task-1779157, 'name': Rename_Task, 'duration_secs': 0.141368} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.954557] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1443.954864] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-41bb0043-a9b7-43b9-a512-7e7fc42ba701 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.964782] env[63379]: DEBUG oslo_vmware.api [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Waiting for the task: (returnval){ [ 1443.964782] env[63379]: value = "task-1779158" [ 1443.964782] env[63379]: _type = "Task" [ 1443.964782] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.974246] env[63379]: DEBUG oslo_vmware.api [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': task-1779158, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.222832] env[63379]: DEBUG nova.network.neutron [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Successfully created port: f559ba57-d459-458a-89b0-a79226abd033 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1444.319016] env[63379]: DEBUG nova.compute.manager [req-aaabdbb4-9af1-4201-b3ed-589a441f9470 req-9f64199c-cfa5-47f1-9949-d023b2fe9735 service nova] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Received event network-vif-deleted-d22964f6-f2df-4a65-9d6f-8ed548989938 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1444.319537] env[63379]: INFO nova.compute.manager [req-aaabdbb4-9af1-4201-b3ed-589a441f9470 req-9f64199c-cfa5-47f1-9949-d023b2fe9735 service nova] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Neutron deleted interface d22964f6-f2df-4a65-9d6f-8ed548989938; detaching it from the instance and deleting it from the info cache [ 1444.319537] env[63379]: DEBUG nova.network.neutron [req-aaabdbb4-9af1-4201-b3ed-589a441f9470 req-9f64199c-cfa5-47f1-9949-d023b2fe9735 service nova] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1444.341534] env[63379]: DEBUG nova.compute.manager [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Start building block device mappings for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1444.415347] env[63379]: DEBUG nova.network.neutron [-] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1444.481020] env[63379]: DEBUG oslo_vmware.api [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': task-1779158, 'name': PowerOnVM_Task, 'duration_secs': 0.450975} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1444.481020] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1444.481020] env[63379]: INFO nova.compute.manager [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Took 8.46 seconds to spawn the instance on the hypervisor. [ 1444.481020] env[63379]: DEBUG nova.compute.manager [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1444.481507] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4cf4b9b-d5e2-438f-8667-fa7adca55802 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.822213] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5a187911-6b11-452e-8aed-54796e55fcfd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.840832] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff6d82e8-c32b-4fb1-9cfe-5f5b7ea6e2b2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.886183] env[63379]: DEBUG nova.compute.manager [req-aaabdbb4-9af1-4201-b3ed-589a441f9470 req-9f64199c-cfa5-47f1-9949-d023b2fe9735 service nova] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Detach interface failed, port_id=d22964f6-f2df-4a65-9d6f-8ed548989938, reason: Instance 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9 could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 1444.916690] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cde4f6a5-32e4-4bce-85b1-00d39b12b49f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.921844] env[63379]: INFO nova.compute.manager [-] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Took 1.40 seconds to deallocate network for instance. 
[ 1444.929085] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb07160b-56c3-43bc-862d-f79c22212046 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.962932] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a65b87bd-fe48-4a2e-9d84-90882e40376e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.971928] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0762f305-527b-4532-998c-d12eef1c027c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.986389] env[63379]: DEBUG nova.compute.provider_tree [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1445.005405] env[63379]: INFO nova.compute.manager [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Took 31.53 seconds to build instance. [ 1445.365814] env[63379]: DEBUG nova.compute.manager [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1445.389886] env[63379]: DEBUG nova.virt.hardware [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1445.390351] env[63379]: DEBUG nova.virt.hardware [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1445.390620] env[63379]: DEBUG nova.virt.hardware [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1445.390923] env[63379]: DEBUG nova.virt.hardware [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1445.391189] env[63379]: DEBUG nova.virt.hardware [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1445.391448] env[63379]: DEBUG nova.virt.hardware [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1445.391767] env[63379]: DEBUG nova.virt.hardware [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1445.392334] env[63379]: DEBUG nova.virt.hardware [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1445.392626] env[63379]: DEBUG nova.virt.hardware [None 
req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1445.392902] env[63379]: DEBUG nova.virt.hardware [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1445.393220] env[63379]: DEBUG nova.virt.hardware [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1445.394417] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5cc04a3-165c-4f2a-8d8a-a20512073c03 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.411961] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6805f06-fc1a-4189-86a0-93fd2ff0d6fe {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.430029] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2f5d20de-5285-451f-ab9b-c45cd36f476c tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1445.489538] env[63379]: DEBUG nova.scheduler.client.report [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1445.507663] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b990496d-0e98-4db8-83cb-c96822c6b40c tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Lock "76731b1b-af66-441b-8fe4-d5d7e7faf3ca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.836s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1445.719241] env[63379]: DEBUG nova.compute.manager [req-3b47db1a-46f7-4ed0-90dc-4174d8a3a314 req-4d003f50-d3bf-4669-874c-be43b6cff5fe service nova] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Received event network-vif-plugged-f559ba57-d459-458a-89b0-a79226abd033 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1445.719493] env[63379]: 
DEBUG oslo_concurrency.lockutils [req-3b47db1a-46f7-4ed0-90dc-4174d8a3a314 req-4d003f50-d3bf-4669-874c-be43b6cff5fe service nova] Acquiring lock "d2f5b406-3d0e-4150-aeaf-7cdacbc12c06-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1445.719759] env[63379]: DEBUG oslo_concurrency.lockutils [req-3b47db1a-46f7-4ed0-90dc-4174d8a3a314 req-4d003f50-d3bf-4669-874c-be43b6cff5fe service nova] Lock "d2f5b406-3d0e-4150-aeaf-7cdacbc12c06-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1445.719937] env[63379]: DEBUG oslo_concurrency.lockutils [req-3b47db1a-46f7-4ed0-90dc-4174d8a3a314 req-4d003f50-d3bf-4669-874c-be43b6cff5fe service nova] Lock "d2f5b406-3d0e-4150-aeaf-7cdacbc12c06-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1445.720360] env[63379]: DEBUG nova.compute.manager [req-3b47db1a-46f7-4ed0-90dc-4174d8a3a314 req-4d003f50-d3bf-4669-874c-be43b6cff5fe service nova] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] No waiting events found dispatching network-vif-plugged-f559ba57-d459-458a-89b0-a79226abd033 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1445.720576] env[63379]: WARNING nova.compute.manager [req-3b47db1a-46f7-4ed0-90dc-4174d8a3a314 req-4d003f50-d3bf-4669-874c-be43b6cff5fe service nova] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Received unexpected event network-vif-plugged-f559ba57-d459-458a-89b0-a79226abd033 for instance with vm_state building and task_state spawning. [ 1445.871498] env[63379]: DEBUG nova.network.neutron [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Successfully updated port: f559ba57-d459-458a-89b0-a79226abd033 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1445.996100] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.666s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1445.996783] env[63379]: DEBUG nova.compute.manager [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1446.001787] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b4fba400-c0db-485d-8309-ba424930d6ef tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.282s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1446.001888] env[63379]: DEBUG nova.objects.instance [None req-b4fba400-c0db-485d-8309-ba424930d6ef tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lazy-loading 'resources' on Instance uuid 0aab61e4-c055-4872-973a-20fa6802ec10 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1446.011198] env[63379]: DEBUG nova.compute.manager [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1446.374422] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Acquiring lock "refresh_cache-d2f5b406-3d0e-4150-aeaf-7cdacbc12c06" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1446.374593] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Acquired lock "refresh_cache-d2f5b406-3d0e-4150-aeaf-7cdacbc12c06" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1446.374746] env[63379]: DEBUG nova.network.neutron [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1446.504848] env[63379]: DEBUG nova.compute.utils [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1446.509667] env[63379]: DEBUG nova.compute.manager [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1446.509943] env[63379]: DEBUG nova.network.neutron [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1446.541899] env[63379]: DEBUG oslo_concurrency.lockutils [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1446.563214] env[63379]: DEBUG nova.policy [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '436288a525ed4c6d90f7f5ab425ede2f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b6d10c9c1e964532945ff3157ebaaa4e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1446.663653] env[63379]: DEBUG nova.compute.manager [None req-91f28ea9-d272-430a-b17f-3bca3f960fb4 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1446.664717] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f43fed5-5dba-4dac-a3ec-68c3fc04e588 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.916565] env[63379]: DEBUG nova.network.neutron [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1446.942234] env[63379]: DEBUG nova.network.neutron [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Successfully created port: b54d5849-e50d-4f42-922f-70d18e44b988 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1447.010868] env[63379]: DEBUG nova.compute.manager [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Start building block device mappings for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1447.122948] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-457a61ea-a0fe-40a6-91a5-d37dc2d9e4fd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.133392] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-746e3363-fac0-409b-a863-5361d882cdf4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.175062] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83a20b08-0466-4ae5-a179-34a9792f1114 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.184264] env[63379]: INFO nova.compute.manager [None req-91f28ea9-d272-430a-b17f-3bca3f960fb4 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] instance snapshotting [ 1447.187440] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0102b2ea-630a-4852-b4cb-6dd50192bab8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.194242] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9a64339-4ee7-4885-908d-9726aa17d3d7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.199966] env[63379]: DEBUG nova.network.neutron [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Updating instance_info_cache with network_info: [{"id": "f559ba57-d459-458a-89b0-a79226abd033", "address": "fa:16:3e:7e:71:a4", "network": {"id": "d52a0ddf-ba39-4c52-9224-86bdc3e637b4", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1358466237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b1491caa48e4025a443f85088c99b4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc845e3-654b-43c6-acea-dde1084f0ad0", "external-id": "nsx-vlan-transportzone-344", "segmentation_id": 344, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf559ba57-d4", "ovs_interfaceid": "f559ba57-d459-458a-89b0-a79226abd033", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1447.216503] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Releasing lock 
"refresh_cache-d2f5b406-3d0e-4150-aeaf-7cdacbc12c06" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1447.216828] env[63379]: DEBUG nova.compute.manager [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Instance network_info: |[{"id": "f559ba57-d459-458a-89b0-a79226abd033", "address": "fa:16:3e:7e:71:a4", "network": {"id": "d52a0ddf-ba39-4c52-9224-86bdc3e637b4", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1358466237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b1491caa48e4025a443f85088c99b4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc845e3-654b-43c6-acea-dde1084f0ad0", "external-id": "nsx-vlan-transportzone-344", "segmentation_id": 344, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf559ba57-d4", "ovs_interfaceid": "f559ba57-d459-458a-89b0-a79226abd033", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1447.218078] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1be34c8-36c0-4020-8304-5fc32d21bcad {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.221422] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7e:71:a4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ccc845e3-654b-43c6-acea-dde1084f0ad0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f559ba57-d459-458a-89b0-a79226abd033', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1447.228827] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Creating folder: Project (1b1491caa48e4025a443f85088c99b4c). Parent ref: group-v369214. 
{{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1447.239916] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a45a6c86-1158-40f5-8c1e-3986a00a82c4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.241249] env[63379]: DEBUG nova.compute.provider_tree [None req-b4fba400-c0db-485d-8309-ba424930d6ef tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1447.254179] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Created folder: Project (1b1491caa48e4025a443f85088c99b4c) in parent group-v369214. [ 1447.254415] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Creating folder: Instances. Parent ref: group-v369293. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1447.254682] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a25ae9fc-1d51-47c6-9829-e19906c3d667 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.266593] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Created folder: Instances in parent group-v369293. [ 1447.266911] env[63379]: DEBUG oslo.service.loopingcall [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1447.267126] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1447.267896] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-23156ab2-684d-412f-9682-4953dc06667c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.289103] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1447.289103] env[63379]: value = "task-1779161" [ 1447.289103] env[63379]: _type = "Task" [ 1447.289103] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1447.298411] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779161, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.746349] env[63379]: DEBUG nova.scheduler.client.report [None req-b4fba400-c0db-485d-8309-ba424930d6ef tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1447.752598] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-91f28ea9-d272-430a-b17f-3bca3f960fb4 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Creating Snapshot of the VM instance {{(pid=63379) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1447.752729] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-df14d596-2227-4f98-80e4-9504212e0747 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.762113] env[63379]: DEBUG oslo_vmware.api [None req-91f28ea9-d272-430a-b17f-3bca3f960fb4 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Waiting for the task: (returnval){ [ 1447.762113] env[63379]: value = "task-1779162" [ 1447.762113] env[63379]: _type = "Task" [ 1447.762113] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1447.775917] env[63379]: DEBUG oslo_vmware.api [None req-91f28ea9-d272-430a-b17f-3bca3f960fb4 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': task-1779162, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.804530] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779161, 'name': CreateVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.806237] env[63379]: DEBUG nova.compute.manager [req-19f9bfad-3c0a-4ee4-bafd-4828df64a2a7 req-113fa3d0-27ef-4489-aaae-3215adfb16f5 service nova] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Received event network-changed-f559ba57-d459-458a-89b0-a79226abd033 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1447.806237] env[63379]: DEBUG nova.compute.manager [req-19f9bfad-3c0a-4ee4-bafd-4828df64a2a7 req-113fa3d0-27ef-4489-aaae-3215adfb16f5 service nova] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Refreshing instance network info cache due to event network-changed-f559ba57-d459-458a-89b0-a79226abd033. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1447.806237] env[63379]: DEBUG oslo_concurrency.lockutils [req-19f9bfad-3c0a-4ee4-bafd-4828df64a2a7 req-113fa3d0-27ef-4489-aaae-3215adfb16f5 service nova] Acquiring lock "refresh_cache-d2f5b406-3d0e-4150-aeaf-7cdacbc12c06" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1447.806613] env[63379]: DEBUG oslo_concurrency.lockutils [req-19f9bfad-3c0a-4ee4-bafd-4828df64a2a7 req-113fa3d0-27ef-4489-aaae-3215adfb16f5 service nova] Acquired lock "refresh_cache-d2f5b406-3d0e-4150-aeaf-7cdacbc12c06" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1447.806613] env[63379]: DEBUG nova.network.neutron [req-19f9bfad-3c0a-4ee4-bafd-4828df64a2a7 req-113fa3d0-27ef-4489-aaae-3215adfb16f5 service nova] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Refreshing network info cache for port f559ba57-d459-458a-89b0-a79226abd033 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1447.995081] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Acquiring lock "aedff32b-b0c2-4a93-a2c6-349d26839cc4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1447.995373] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Lock "aedff32b-b0c2-4a93-a2c6-349d26839cc4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1448.028160] env[63379]: DEBUG nova.compute.manager [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1448.049988] env[63379]: DEBUG nova.virt.hardware [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1448.050270] env[63379]: DEBUG nova.virt.hardware [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1448.050435] env[63379]: DEBUG nova.virt.hardware [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1448.050625] env[63379]: DEBUG nova.virt.hardware [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1448.050777] env[63379]: DEBUG nova.virt.hardware [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1448.050926] env[63379]: DEBUG nova.virt.hardware [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1448.051160] env[63379]: DEBUG nova.virt.hardware [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1448.051337] env[63379]: DEBUG nova.virt.hardware [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1448.051486] env[63379]: DEBUG 
nova.virt.hardware [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1448.051697] env[63379]: DEBUG nova.virt.hardware [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1448.051872] env[63379]: DEBUG nova.virt.hardware [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1448.052803] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4cfd615-47c6-4746-8f00-b54fe33d9971 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.061289] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b5d6886-9d23-4571-88e6-460702b099ea {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.254031] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b4fba400-c0db-485d-8309-ba424930d6ef tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.252s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1448.257384] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e19ea006-6e94-42e0-9238-71b6ae829c2e tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.832s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1448.257384] env[63379]: DEBUG nova.objects.instance [None req-e19ea006-6e94-42e0-9238-71b6ae829c2e tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Lazy-loading 'resources' on Instance uuid a6f7c217-a493-403d-b776-870df4575f2a {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1448.272845] env[63379]: DEBUG oslo_vmware.api [None req-91f28ea9-d272-430a-b17f-3bca3f960fb4 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': task-1779162, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.279369] env[63379]: INFO nova.scheduler.client.report [None req-b4fba400-c0db-485d-8309-ba424930d6ef tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Deleted allocations for instance 0aab61e4-c055-4872-973a-20fa6802ec10 [ 1448.301245] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779161, 'name': CreateVM_Task, 'duration_secs': 0.617829} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1448.301417] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1448.302089] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1448.302269] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1448.302591] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1448.303146] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff75cb73-59d5-47a1-9086-43e3c4a01be1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.308483] env[63379]: DEBUG oslo_vmware.api [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Waiting for the task: (returnval){ [ 1448.308483] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]527408aa-4497-f31c-5a44-4a3ace87354a" [ 1448.308483] env[63379]: _type = "Task" [ 1448.308483] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.320064] env[63379]: DEBUG oslo_vmware.api [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]527408aa-4497-f31c-5a44-4a3ace87354a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.559763] env[63379]: DEBUG nova.network.neutron [req-19f9bfad-3c0a-4ee4-bafd-4828df64a2a7 req-113fa3d0-27ef-4489-aaae-3215adfb16f5 service nova] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Updated VIF entry in instance network info cache for port f559ba57-d459-458a-89b0-a79226abd033. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1448.560204] env[63379]: DEBUG nova.network.neutron [req-19f9bfad-3c0a-4ee4-bafd-4828df64a2a7 req-113fa3d0-27ef-4489-aaae-3215adfb16f5 service nova] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Updating instance_info_cache with network_info: [{"id": "f559ba57-d459-458a-89b0-a79226abd033", "address": "fa:16:3e:7e:71:a4", "network": {"id": "d52a0ddf-ba39-4c52-9224-86bdc3e637b4", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1358466237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b1491caa48e4025a443f85088c99b4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc845e3-654b-43c6-acea-dde1084f0ad0", "external-id": "nsx-vlan-transportzone-344", "segmentation_id": 344, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf559ba57-d4", "ovs_interfaceid": "f559ba57-d459-458a-89b0-a79226abd033", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1448.719901] env[63379]: DEBUG nova.network.neutron [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Successfully updated port: b54d5849-e50d-4f42-922f-70d18e44b988 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1448.773365] env[63379]: DEBUG oslo_vmware.api [None req-91f28ea9-d272-430a-b17f-3bca3f960fb4 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': task-1779162, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.786986] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b4fba400-c0db-485d-8309-ba424930d6ef tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "0aab61e4-c055-4872-973a-20fa6802ec10" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.520s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1448.825912] env[63379]: DEBUG oslo_vmware.api [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]527408aa-4497-f31c-5a44-4a3ace87354a, 'name': SearchDatastore_Task, 'duration_secs': 0.010177} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1448.826347] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1448.826598] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1448.826834] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1448.826981] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1448.827174] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1448.827449] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b521ee2b-0187-4b23-a328-5bf458a7e694 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.839710] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1448.840545] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1448.843198] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ff40a94-0e06-43a6-9e55-ce8412b759ec {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.850192] env[63379]: DEBUG oslo_vmware.api [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Waiting for the task: (returnval){ [ 1448.850192] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5251c920-51fe-012e-8ae5-0a88409f81c9" [ 1448.850192] env[63379]: _type = "Task" [ 1448.850192] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.862098] env[63379]: DEBUG oslo_vmware.api [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5251c920-51fe-012e-8ae5-0a88409f81c9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.063522] env[63379]: DEBUG oslo_concurrency.lockutils [req-19f9bfad-3c0a-4ee4-bafd-4828df64a2a7 req-113fa3d0-27ef-4489-aaae-3215adfb16f5 service nova] Releasing lock "refresh_cache-d2f5b406-3d0e-4150-aeaf-7cdacbc12c06" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1449.174022] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea5bec0a-9bbb-4644-8ed5-8c197721957d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.190614] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea990a0b-70c2-480e-8084-8dbafaa5d37a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.223305] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Acquiring lock "refresh_cache-08465a2c-1ab6-4c53-9b12-3cd51c717b03" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1449.223466] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Acquired lock "refresh_cache-08465a2c-1ab6-4c53-9b12-3cd51c717b03" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1449.223566] env[63379]: DEBUG nova.network.neutron [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1449.225439] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cbcd1bb-ac98-464a-a9f0-0c71a33fbd77 {{(pid=63379) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.234787] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62a6c24d-8993-4c70-9c35-bc9685a9cfd3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.249389] env[63379]: DEBUG nova.compute.provider_tree [None req-e19ea006-6e94-42e0-9238-71b6ae829c2e tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1449.273251] env[63379]: DEBUG oslo_vmware.api [None req-91f28ea9-d272-430a-b17f-3bca3f960fb4 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': task-1779162, 'name': CreateSnapshot_Task, 'duration_secs': 1.180947} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1449.273515] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-91f28ea9-d272-430a-b17f-3bca3f960fb4 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Created Snapshot of the VM instance {{(pid=63379) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1449.274256] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6182752-a4b3-4b74-b87a-30559e1cecc0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.360925] env[63379]: DEBUG oslo_vmware.api [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5251c920-51fe-012e-8ae5-0a88409f81c9, 'name': SearchDatastore_Task, 'duration_secs': 0.012053} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1449.361729] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1d33704-9993-473e-8b09-23e7f94854cb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.367269] env[63379]: DEBUG oslo_vmware.api [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Waiting for the task: (returnval){ [ 1449.367269] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]528953ac-ed7c-16ec-4099-f9c1c4a1a12a" [ 1449.367269] env[63379]: _type = "Task" [ 1449.367269] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1449.374881] env[63379]: DEBUG oslo_vmware.api [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]528953ac-ed7c-16ec-4099-f9c1c4a1a12a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.753772] env[63379]: DEBUG nova.scheduler.client.report [None req-e19ea006-6e94-42e0-9238-71b6ae829c2e tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1449.766834] env[63379]: DEBUG nova.network.neutron [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1449.792443] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-91f28ea9-d272-430a-b17f-3bca3f960fb4 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Creating linked-clone VM from snapshot {{(pid=63379) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1449.793294] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-02bc22af-3d24-4b10-be7e-fa185858f111 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.806796] env[63379]: DEBUG oslo_vmware.api [None req-91f28ea9-d272-430a-b17f-3bca3f960fb4 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Waiting for the task: (returnval){ [ 1449.806796] env[63379]: value = "task-1779163" [ 1449.806796] env[63379]: _type = "Task" [ 1449.806796] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1449.816853] env[63379]: DEBUG oslo_vmware.api [None req-91f28ea9-d272-430a-b17f-3bca3f960fb4 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': task-1779163, 'name': CloneVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.873765] env[63379]: DEBUG nova.compute.manager [req-acb5d3cd-ff8e-4d5b-aed4-1434532ba1a3 req-7909c248-e0c4-4158-9768-cd23d8715070 service nova] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Received event network-vif-plugged-b54d5849-e50d-4f42-922f-70d18e44b988 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1449.874655] env[63379]: DEBUG oslo_concurrency.lockutils [req-acb5d3cd-ff8e-4d5b-aed4-1434532ba1a3 req-7909c248-e0c4-4158-9768-cd23d8715070 service nova] Acquiring lock "08465a2c-1ab6-4c53-9b12-3cd51c717b03-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1449.874904] env[63379]: DEBUG oslo_concurrency.lockutils [req-acb5d3cd-ff8e-4d5b-aed4-1434532ba1a3 req-7909c248-e0c4-4158-9768-cd23d8715070 service nova] Lock "08465a2c-1ab6-4c53-9b12-3cd51c717b03-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1449.876434] env[63379]: DEBUG oslo_concurrency.lockutils [req-acb5d3cd-ff8e-4d5b-aed4-1434532ba1a3 req-7909c248-e0c4-4158-9768-cd23d8715070 service nova] Lock "08465a2c-1ab6-4c53-9b12-3cd51c717b03-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1449.876669] env[63379]: DEBUG nova.compute.manager [req-acb5d3cd-ff8e-4d5b-aed4-1434532ba1a3 req-7909c248-e0c4-4158-9768-cd23d8715070 service nova] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] No waiting events found dispatching network-vif-plugged-b54d5849-e50d-4f42-922f-70d18e44b988 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1449.876857] env[63379]: WARNING nova.compute.manager [req-acb5d3cd-ff8e-4d5b-aed4-1434532ba1a3 req-7909c248-e0c4-4158-9768-cd23d8715070 service nova] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Received unexpected event network-vif-plugged-b54d5849-e50d-4f42-922f-70d18e44b988 for instance with vm_state building and task_state spawning. [ 1449.877037] env[63379]: DEBUG nova.compute.manager [req-acb5d3cd-ff8e-4d5b-aed4-1434532ba1a3 req-7909c248-e0c4-4158-9768-cd23d8715070 service nova] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Received event network-changed-b54d5849-e50d-4f42-922f-70d18e44b988 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1449.877217] env[63379]: DEBUG nova.compute.manager [req-acb5d3cd-ff8e-4d5b-aed4-1434532ba1a3 req-7909c248-e0c4-4158-9768-cd23d8715070 service nova] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Refreshing instance network info cache due to event network-changed-b54d5849-e50d-4f42-922f-70d18e44b988. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1449.877436] env[63379]: DEBUG oslo_concurrency.lockutils [req-acb5d3cd-ff8e-4d5b-aed4-1434532ba1a3 req-7909c248-e0c4-4158-9768-cd23d8715070 service nova] Acquiring lock "refresh_cache-08465a2c-1ab6-4c53-9b12-3cd51c717b03" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1449.884615] env[63379]: DEBUG oslo_vmware.api [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]528953ac-ed7c-16ec-4099-f9c1c4a1a12a, 'name': SearchDatastore_Task, 'duration_secs': 0.009686} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1449.884899] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1449.885190] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] d2f5b406-3d0e-4150-aeaf-7cdacbc12c06/d2f5b406-3d0e-4150-aeaf-7cdacbc12c06.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1449.885462] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-06ca3beb-b831-4e4a-8060-90bda07721ea {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.892691] env[63379]: DEBUG oslo_vmware.api [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Waiting for the task: (returnval){ [ 1449.892691] env[63379]: value = "task-1779164" [ 1449.892691] env[63379]: _type = "Task" [ 1449.892691] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1449.902506] env[63379]: DEBUG oslo_vmware.api [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Task: {'id': task-1779164, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.960695] env[63379]: DEBUG nova.network.neutron [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Updating instance_info_cache with network_info: [{"id": "b54d5849-e50d-4f42-922f-70d18e44b988", "address": "fa:16:3e:59:4a:db", "network": {"id": "46d4434c-021e-45e1-8971-650440e8abdc", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-551179399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6d10c9c1e964532945ff3157ebaaa4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ff3ecd2f-0b10-4faf-a512-fd7a20c28df1", "external-id": "nsx-vlan-transportzone-291", "segmentation_id": 291, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb54d5849-e5", "ovs_interfaceid": "b54d5849-e50d-4f42-922f-70d18e44b988", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1450.262837] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e19ea006-6e94-42e0-9238-71b6ae829c2e tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.006s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1450.266212] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.575s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1450.267127] env[63379]: INFO nova.compute.claims [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1450.289163] env[63379]: INFO nova.scheduler.client.report [None req-e19ea006-6e94-42e0-9238-71b6ae829c2e tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Deleted allocations for instance a6f7c217-a493-403d-b776-870df4575f2a [ 1450.325831] env[63379]: DEBUG oslo_vmware.api [None req-91f28ea9-d272-430a-b17f-3bca3f960fb4 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': task-1779163, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.407138] env[63379]: DEBUG oslo_vmware.api [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Task: {'id': task-1779164, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.464152] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Releasing lock "refresh_cache-08465a2c-1ab6-4c53-9b12-3cd51c717b03" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1450.464523] env[63379]: DEBUG nova.compute.manager [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Instance network_info: |[{"id": "b54d5849-e50d-4f42-922f-70d18e44b988", "address": "fa:16:3e:59:4a:db", "network": {"id": "46d4434c-021e-45e1-8971-650440e8abdc", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-551179399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6d10c9c1e964532945ff3157ebaaa4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ff3ecd2f-0b10-4faf-a512-fd7a20c28df1", "external-id": "nsx-vlan-transportzone-291", "segmentation_id": 291, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb54d5849-e5", "ovs_interfaceid": "b54d5849-e50d-4f42-922f-70d18e44b988", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1450.464860] env[63379]: DEBUG oslo_concurrency.lockutils [req-acb5d3cd-ff8e-4d5b-aed4-1434532ba1a3 req-7909c248-e0c4-4158-9768-cd23d8715070 service nova] Acquired lock "refresh_cache-08465a2c-1ab6-4c53-9b12-3cd51c717b03" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1450.465058] env[63379]: DEBUG nova.network.neutron [req-acb5d3cd-ff8e-4d5b-aed4-1434532ba1a3 req-7909c248-e0c4-4158-9768-cd23d8715070 service nova] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Refreshing network info cache for port b54d5849-e50d-4f42-922f-70d18e44b988 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1450.466300] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:59:4a:db', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ff3ecd2f-0b10-4faf-a512-fd7a20c28df1', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': 'b54d5849-e50d-4f42-922f-70d18e44b988', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1450.473848] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Creating folder: Project (b6d10c9c1e964532945ff3157ebaaa4e). Parent ref: group-v369214. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1450.477200] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1b7738d0-acf5-44c3-b504-afec888cf27d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.490308] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Created folder: Project (b6d10c9c1e964532945ff3157ebaaa4e) in parent group-v369214. [ 1450.490520] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Creating folder: Instances. Parent ref: group-v369298. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1450.490760] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c668df69-6f53-45d3-9c92-ebc13fcf20d8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.500703] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Created folder: Instances in parent group-v369298. [ 1450.500968] env[63379]: DEBUG oslo.service.loopingcall [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1450.501186] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1450.501397] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3a8917f7-1a24-495a-8bae-61d2aa6599bb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.524549] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1450.524549] env[63379]: value = "task-1779167" [ 1450.524549] env[63379]: _type = "Task" [ 1450.524549] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.534870] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779167, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.720257] env[63379]: DEBUG nova.network.neutron [req-acb5d3cd-ff8e-4d5b-aed4-1434532ba1a3 req-7909c248-e0c4-4158-9768-cd23d8715070 service nova] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Updated VIF entry in instance network info cache for port b54d5849-e50d-4f42-922f-70d18e44b988. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1450.720257] env[63379]: DEBUG nova.network.neutron [req-acb5d3cd-ff8e-4d5b-aed4-1434532ba1a3 req-7909c248-e0c4-4158-9768-cd23d8715070 service nova] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Updating instance_info_cache with network_info: [{"id": "b54d5849-e50d-4f42-922f-70d18e44b988", "address": "fa:16:3e:59:4a:db", "network": {"id": "46d4434c-021e-45e1-8971-650440e8abdc", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-551179399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6d10c9c1e964532945ff3157ebaaa4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ff3ecd2f-0b10-4faf-a512-fd7a20c28df1", "external-id": "nsx-vlan-transportzone-291", "segmentation_id": 291, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb54d5849-e5", "ovs_interfaceid": "b54d5849-e50d-4f42-922f-70d18e44b988", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1450.797129] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e19ea006-6e94-42e0-9238-71b6ae829c2e tempest-ServersAdmin275Test-1762012693 tempest-ServersAdmin275Test-1762012693-project-member] Lock "a6f7c217-a493-403d-b776-870df4575f2a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.364s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1450.819157] env[63379]: DEBUG oslo_vmware.api [None req-91f28ea9-d272-430a-b17f-3bca3f960fb4 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': task-1779163, 'name': CloneVM_Task} progress is 95%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.903593] env[63379]: DEBUG oslo_vmware.api [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Task: {'id': task-1779164, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.533817} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.903870] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] d2f5b406-3d0e-4150-aeaf-7cdacbc12c06/d2f5b406-3d0e-4150-aeaf-7cdacbc12c06.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1450.904097] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1450.904407] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b5c806af-3f80-42eb-8d1b-4e9f2575850c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.914934] env[63379]: DEBUG oslo_vmware.api [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Waiting for the task: (returnval){ [ 1450.914934] env[63379]: value = "task-1779168" [ 1450.914934] env[63379]: _type = "Task" [ 1450.914934] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.921738] env[63379]: DEBUG oslo_vmware.api [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Task: {'id': task-1779168, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.035013] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779167, 'name': CreateVM_Task, 'duration_secs': 0.332989} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.035487] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1451.036320] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1451.036629] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1451.039018] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1451.039018] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b62bc960-5d32-4f98-90d3-b9c9a2b82134 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.042827] env[63379]: DEBUG oslo_vmware.api [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Waiting for the task: (returnval){ [ 1451.042827] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52fe683d-b6b7-78ff-aca8-93f65d1b7821" [ 1451.042827] env[63379]: _type = "Task" [ 1451.042827] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1451.052839] env[63379]: DEBUG oslo_vmware.api [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52fe683d-b6b7-78ff-aca8-93f65d1b7821, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.222486] env[63379]: DEBUG oslo_concurrency.lockutils [req-acb5d3cd-ff8e-4d5b-aed4-1434532ba1a3 req-7909c248-e0c4-4158-9768-cd23d8715070 service nova] Releasing lock "refresh_cache-08465a2c-1ab6-4c53-9b12-3cd51c717b03" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1451.318741] env[63379]: DEBUG oslo_vmware.api [None req-91f28ea9-d272-430a-b17f-3bca3f960fb4 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': task-1779163, 'name': CloneVM_Task} progress is 100%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.423506] env[63379]: DEBUG oslo_vmware.api [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Task: {'id': task-1779168, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074496} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.425892] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1451.427097] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c5a1acb-391f-4c81-b995-ef38bcc671e9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.451412] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Reconfiguring VM instance instance-0000001a to attach disk [datastore1] d2f5b406-3d0e-4150-aeaf-7cdacbc12c06/d2f5b406-3d0e-4150-aeaf-7cdacbc12c06.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1451.454184] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0d15db00-3053-4a05-bb03-d54273481721 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.474871] env[63379]: DEBUG oslo_vmware.api [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Waiting for the task: (returnval){ [ 1451.474871] env[63379]: value = "task-1779169" [ 1451.474871] env[63379]: _type = "Task" [ 1451.474871] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1451.488148] env[63379]: DEBUG oslo_vmware.api [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Task: {'id': task-1779169, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.556896] env[63379]: DEBUG oslo_vmware.api [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52fe683d-b6b7-78ff-aca8-93f65d1b7821, 'name': SearchDatastore_Task, 'duration_secs': 0.010319} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.557243] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1451.557528] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1451.557791] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1451.558025] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1451.558266] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1451.558557] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bd37eb9d-b416-4dd5-b4aa-d5079091e0a6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.570755] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1451.570976] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1451.571762] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a53df50-c70a-49a1-957a-e4bfcd870355 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.580696] env[63379]: DEBUG oslo_vmware.api [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Waiting for the task: (returnval){ [ 1451.580696] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]526a30d3-4de1-3c34-ecd7-321b09921cdf" [ 1451.580696] env[63379]: _type = "Task" [ 1451.580696] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1451.591331] env[63379]: DEBUG oslo_vmware.api [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]526a30d3-4de1-3c34-ecd7-321b09921cdf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.702374] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06cf8adc-8370-4f7e-959e-506006ccce8f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.710540] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d0db61d-1806-4d31-a30c-ef59b36d4dde {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.741636] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68639a96-9662-452e-b688-50397f6da224 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.749919] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3773987-64da-41c8-869f-f59ce8f8901c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.764460] env[63379]: DEBUG nova.compute.provider_tree [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1451.820688] env[63379]: DEBUG oslo_vmware.api [None req-91f28ea9-d272-430a-b17f-3bca3f960fb4 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': task-1779163, 'name': CloneVM_Task, 'duration_secs': 1.531993} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.820972] env[63379]: INFO nova.virt.vmwareapi.vmops [None req-91f28ea9-d272-430a-b17f-3bca3f960fb4 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Created linked-clone VM from snapshot [ 1451.821837] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-040689d3-6253-4977-9cce-1a0b11c274c3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.830236] env[63379]: DEBUG nova.virt.vmwareapi.images [None req-91f28ea9-d272-430a-b17f-3bca3f960fb4 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Uploading image f2b0f428-9e21-4972-9c70-84b0e00c7270 {{(pid=63379) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1451.855368] env[63379]: DEBUG oslo_vmware.rw_handles [None req-91f28ea9-d272-430a-b17f-3bca3f960fb4 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1451.855368] env[63379]: value = "vm-369297" [ 1451.855368] env[63379]: _type = "VirtualMachine" [ 1451.855368] env[63379]: }. {{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1451.855552] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-9010f113-154d-495a-8392-ceb3d45b7b58 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.865231] env[63379]: DEBUG oslo_vmware.rw_handles [None req-91f28ea9-d272-430a-b17f-3bca3f960fb4 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Lease: (returnval){ [ 1451.865231] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5207858a-47f2-7793-9486-8f290831ffd3" [ 1451.865231] env[63379]: _type = "HttpNfcLease" [ 1451.865231] env[63379]: } obtained for exporting VM: (result){ [ 1451.865231] env[63379]: value = "vm-369297" [ 1451.865231] env[63379]: _type = "VirtualMachine" [ 1451.865231] env[63379]: }. {{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1451.865231] env[63379]: DEBUG oslo_vmware.api [None req-91f28ea9-d272-430a-b17f-3bca3f960fb4 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Waiting for the lease: (returnval){ [ 1451.865231] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5207858a-47f2-7793-9486-8f290831ffd3" [ 1451.865231] env[63379]: _type = "HttpNfcLease" [ 1451.865231] env[63379]: } to be ready. {{(pid=63379) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1451.870994] env[63379]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1451.870994] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5207858a-47f2-7793-9486-8f290831ffd3" [ 1451.870994] env[63379]: _type = "HttpNfcLease" [ 1451.870994] env[63379]: } is initializing. 
{{(pid=63379) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1451.988330] env[63379]: DEBUG oslo_vmware.api [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Task: {'id': task-1779169, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.092780] env[63379]: DEBUG oslo_vmware.api [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]526a30d3-4de1-3c34-ecd7-321b09921cdf, 'name': SearchDatastore_Task, 'duration_secs': 0.010044} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.093758] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0dbf6afb-57ca-43ca-9780-c3e6028ed23d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.099454] env[63379]: DEBUG oslo_vmware.api [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Waiting for the task: (returnval){ [ 1452.099454] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52ecbc22-94cb-9a96-fc78-2fcc2eba3017" [ 1452.099454] env[63379]: _type = "Task" [ 1452.099454] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.108317] env[63379]: DEBUG oslo_vmware.api [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52ecbc22-94cb-9a96-fc78-2fcc2eba3017, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.268137] env[63379]: DEBUG nova.scheduler.client.report [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1452.374180] env[63379]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1452.374180] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5207858a-47f2-7793-9486-8f290831ffd3" [ 1452.374180] env[63379]: _type = "HttpNfcLease" [ 1452.374180] env[63379]: } is ready. 
{{(pid=63379) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1452.374605] env[63379]: DEBUG oslo_vmware.rw_handles [None req-91f28ea9-d272-430a-b17f-3bca3f960fb4 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1452.374605] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5207858a-47f2-7793-9486-8f290831ffd3" [ 1452.374605] env[63379]: _type = "HttpNfcLease" [ 1452.374605] env[63379]: }. {{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1452.375551] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b745e9d-3269-4f39-ac2d-4d9813fdc42c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.384337] env[63379]: DEBUG oslo_vmware.rw_handles [None req-91f28ea9-d272-430a-b17f-3bca3f960fb4 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b329e4-7901-7537-3458-06af5f4f07b0/disk-0.vmdk from lease info. {{(pid=63379) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1452.384629] env[63379]: DEBUG oslo_vmware.rw_handles [None req-91f28ea9-d272-430a-b17f-3bca3f960fb4 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b329e4-7901-7537-3458-06af5f4f07b0/disk-0.vmdk for reading. {{(pid=63379) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1452.486996] env[63379]: DEBUG oslo_vmware.api [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Task: {'id': task-1779169, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.527771] env[63379]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-75b88882-8424-4408-a537-92a2b826ccbb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.611343] env[63379]: DEBUG oslo_vmware.api [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52ecbc22-94cb-9a96-fc78-2fcc2eba3017, 'name': SearchDatastore_Task, 'duration_secs': 0.009985} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.611628] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1452.611890] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 08465a2c-1ab6-4c53-9b12-3cd51c717b03/08465a2c-1ab6-4c53-9b12-3cd51c717b03.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1452.612181] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-73aeafce-a771-457e-9564-aaebc8d66fca {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.620425] env[63379]: DEBUG oslo_vmware.api [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Waiting for the task: (returnval){ [ 1452.620425] env[63379]: value = "task-1779171" [ 1452.620425] env[63379]: _type = "Task" [ 1452.620425] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.629317] env[63379]: DEBUG oslo_vmware.api [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Task: {'id': task-1779171, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.773400] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.508s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1452.775478] env[63379]: DEBUG nova.compute.manager [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1452.778945] env[63379]: DEBUG oslo_concurrency.lockutils [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.168s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1452.780503] env[63379]: INFO nova.compute.claims [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1452.994216] env[63379]: DEBUG oslo_vmware.api [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Task: {'id': task-1779169, 'name': ReconfigVM_Task, 'duration_secs': 1.244714} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.994605] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Reconfigured VM instance instance-0000001a to attach disk [datastore1] d2f5b406-3d0e-4150-aeaf-7cdacbc12c06/d2f5b406-3d0e-4150-aeaf-7cdacbc12c06.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1452.995349] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6841ff0f-f3fe-44e4-a9c8-739cfbccee9e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.006847] env[63379]: DEBUG oslo_vmware.api [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Waiting for the task: (returnval){ [ 1453.006847] env[63379]: value = "task-1779172" [ 1453.006847] env[63379]: _type = "Task" [ 1453.006847] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.023785] env[63379]: DEBUG oslo_vmware.api [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Task: {'id': task-1779172, 'name': Rename_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.134739] env[63379]: DEBUG oslo_vmware.api [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Task: {'id': task-1779171, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.289785] env[63379]: DEBUG nova.compute.utils [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1453.293110] env[63379]: DEBUG nova.compute.manager [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1453.293110] env[63379]: DEBUG nova.network.neutron [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1453.387759] env[63379]: DEBUG nova.policy [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9162483675d540dfb8551206627b50e7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '767980ba969142098ccbdf031f6e62a9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1453.520650] env[63379]: DEBUG oslo_vmware.api [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Task: {'id': task-1779172, 'name': Rename_Task, 'duration_secs': 0.239561} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.520855] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1453.522008] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cc9a716e-677a-4314-843b-a12147645da0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.532014] env[63379]: DEBUG oslo_vmware.api [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Waiting for the task: (returnval){ [ 1453.532014] env[63379]: value = "task-1779173" [ 1453.532014] env[63379]: _type = "Task" [ 1453.532014] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.539252] env[63379]: DEBUG oslo_vmware.api [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Task: {'id': task-1779173, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.636677] env[63379]: DEBUG oslo_vmware.api [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Task: {'id': task-1779171, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.521341} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.636829] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 08465a2c-1ab6-4c53-9b12-3cd51c717b03/08465a2c-1ab6-4c53-9b12-3cd51c717b03.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1453.637209] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1453.637678] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f617e05c-7e35-4a62-b429-8b8e410f4e1f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.645977] env[63379]: DEBUG oslo_vmware.api [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Waiting for the task: (returnval){ [ 1453.645977] env[63379]: value = "task-1779174" [ 1453.645977] env[63379]: _type = "Task" [ 1453.645977] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.658079] env[63379]: DEBUG oslo_vmware.api [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Task: {'id': task-1779174, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.798292] env[63379]: DEBUG nova.compute.manager [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Start building block device mappings for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1453.973718] env[63379]: DEBUG nova.network.neutron [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Successfully created port: 58a83e9a-4269-4e0e-8eb8-2d5b517e605f {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1454.041870] env[63379]: DEBUG oslo_vmware.api [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Task: {'id': task-1779173, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.158649] env[63379]: DEBUG oslo_vmware.api [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Task: {'id': task-1779174, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083065} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.159047] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1454.159944] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8076bbf6-7cfe-4add-87d6-a17ca1cc08cd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.194919] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Reconfiguring VM instance instance-0000001b to attach disk [datastore1] 08465a2c-1ab6-4c53-9b12-3cd51c717b03/08465a2c-1ab6-4c53-9b12-3cd51c717b03.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1454.197884] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1c15102a-07f8-48de-bd6c-7553a9212a05 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.222338] env[63379]: DEBUG oslo_vmware.api [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Waiting for the task: (returnval){ [ 1454.222338] env[63379]: value = "task-1779175" [ 1454.222338] env[63379]: _type = "Task" [ 1454.222338] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.236016] env[63379]: DEBUG oslo_vmware.api [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Task: {'id': task-1779175, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.400278] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f01b1dde-a33b-4b9a-9063-3b9c2d9c8109 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.409615] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3584b4d5-f559-48a2-8d05-bc0dd299becb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.447708] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e400b03b-e54d-484f-9de9-434e532c84b4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.457336] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-597957d7-a2f4-45e6-be49-417b06d15268 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.473917] env[63379]: DEBUG nova.compute.provider_tree [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1454.547413] env[63379]: DEBUG oslo_vmware.api [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Task: {'id': task-1779173, 'name': PowerOnVM_Task, 'duration_secs': 0.712456} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.547742] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1454.548138] env[63379]: INFO nova.compute.manager [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Took 9.18 seconds to spawn the instance on the hypervisor. 
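The CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task and PowerOnVM_Task entries above all follow the same wait-for-task pattern: the task reference returned by vCenter is polled until it reports success or error, which is what the repeated "_poll_task ... progress is N%" and "completed successfully ... duration_secs" lines record. A minimal illustrative sketch of that polling loop is below; `get_task_info`, its return object and the printed messages are hypothetical stand-ins, not the oslo.vmware implementation referenced in the log.

```python
import time


class TaskFailed(Exception):
    """Raised when a polled task ends in the error state."""


def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
    """Poll a vCenter-style task reference until it completes.

    ``get_task_info`` is a hypothetical callable returning an object with
    ``state`` ('running', 'success' or 'error'), ``progress`` (0-100) and
    ``error`` attributes -- a stand-in for the property reads the log shows
    being issued against each Task object.
    """
    start = time.monotonic()
    while True:
        info = get_task_info(task_ref)
        if info.state == "success":
            # Corresponds to the "completed successfully ... duration_secs" entries.
            return time.monotonic() - start
        if info.state == "error":
            raise TaskFailed(info.error)
        # Corresponds to the intermediate "progress is N%" entries.
        print(f"Task {task_ref}: progress is {info.progress}%")
        time.sleep(poll_interval)
```

In the log, this loop runs inside the library's own wait_for_task, which emits a _poll_task debug line per iteration and the final duration when the task finishes.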
[ 1454.548243] env[63379]: DEBUG nova.compute.manager [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1454.549922] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-887f9bec-1f12-4433-bf68-4b67f98efedb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.731857] env[63379]: DEBUG oslo_vmware.api [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Task: {'id': task-1779175, 'name': ReconfigVM_Task, 'duration_secs': 0.34007} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.732169] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Reconfigured VM instance instance-0000001b to attach disk [datastore1] 08465a2c-1ab6-4c53-9b12-3cd51c717b03/08465a2c-1ab6-4c53-9b12-3cd51c717b03.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1454.733474] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-befe34b5-8103-4419-9c4a-776a7f27efe9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.740253] env[63379]: DEBUG oslo_vmware.api [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Waiting for the task: (returnval){ [ 1454.740253] env[63379]: value = "task-1779176" [ 1454.740253] env[63379]: _type = "Task" [ 1454.740253] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.749039] env[63379]: DEBUG oslo_vmware.api [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Task: {'id': task-1779176, 'name': Rename_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.813011] env[63379]: DEBUG nova.compute.manager [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1454.839942] env[63379]: DEBUG nova.virt.hardware [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1454.839942] env[63379]: DEBUG nova.virt.hardware [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1454.839942] env[63379]: DEBUG nova.virt.hardware [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1454.839942] env[63379]: DEBUG nova.virt.hardware [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1454.840191] env[63379]: DEBUG nova.virt.hardware [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1454.840220] env[63379]: DEBUG nova.virt.hardware [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1454.841119] env[63379]: DEBUG nova.virt.hardware [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1454.841119] env[63379]: DEBUG nova.virt.hardware [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1454.841119] env[63379]: DEBUG nova.virt.hardware [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Got 1 
possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1454.841119] env[63379]: DEBUG nova.virt.hardware [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1454.841293] env[63379]: DEBUG nova.virt.hardware [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1454.842259] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a109b44-86fe-4c03-bed7-e52d4915bdca {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.853483] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2c6c985-c7e3-46e1-b0e6-a50f33291584 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.978356] env[63379]: DEBUG nova.scheduler.client.report [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1455.075970] env[63379]: INFO nova.compute.manager [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Took 40.65 seconds to build instance. [ 1455.254367] env[63379]: DEBUG oslo_vmware.api [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Task: {'id': task-1779176, 'name': Rename_Task, 'duration_secs': 0.147221} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.254747] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1455.255083] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a6dd180e-9393-45f3-be70-3449c511fb3b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.264211] env[63379]: DEBUG oslo_vmware.api [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Waiting for the task: (returnval){ [ 1455.264211] env[63379]: value = "task-1779177" [ 1455.264211] env[63379]: _type = "Task" [ 1455.264211] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.275415] env[63379]: DEBUG oslo_vmware.api [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Task: {'id': task-1779177, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.484837] env[63379]: DEBUG oslo_concurrency.lockutils [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.706s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1455.485559] env[63379]: DEBUG nova.compute.manager [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1455.489248] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d79c68bb-b07e-42ab-b6fd-1990e43fd95d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.503s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1455.490126] env[63379]: DEBUG nova.objects.instance [None req-d79c68bb-b07e-42ab-b6fd-1990e43fd95d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Lazy-loading 'resources' on Instance uuid d221329b-eee4-42f5-bb27-cf6af0386c04 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1455.582088] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d77cc5d3-644b-438e-94cb-18d8c9ba65f0 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Lock "d2f5b406-3d0e-4150-aeaf-7cdacbc12c06" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.095s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1455.776871] env[63379]: DEBUG oslo_vmware.api [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Task: {'id': task-1779177, 'name': PowerOnVM_Task, 'duration_secs': 0.499186} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.778226] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1455.778655] env[63379]: INFO nova.compute.manager [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Took 7.75 seconds to spawn the instance on the hypervisor. 
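The "Acquiring lock" / "Acquired lock" / "Releasing lock" entries, with their waited/held timings (for example around the "[datastore1] devstack-image-cache_base/..." image-cache path and the process-wide "compute_resources" lock waited on for ~28-30s above), come from oslo.concurrency's lockutils. The sketch below shows the two public primitives involved, the lock() context manager and the synchronized() decorator; the lock names are taken from the log, everything else is illustrative and not a claim about how Nova wires them internally.

```python
from oslo_concurrency import lockutils

# Lock name copied from the log; only one worker per process touches this
# image-cache entry at a time, which is what produces the Acquiring/Acquired/
# Releasing sequence with waited/held durations.
CACHE_LOCK = ("[datastore1] devstack-image-cache_base/"
              "d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48")


def fetch_image_if_missing():
    with lockutils.lock(CACHE_LOCK):
        # ... download or reuse the cached VMDK here ...
        pass


# Decorator form on a shared lock name: concurrent instance builds serialize
# on it, so later callers log long "waited" times before acquiring.
@lockutils.synchronized("compute_resources")
def instance_claim():
    # ... update resource tracker state here ...
    pass
```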
[ 1455.779510] env[63379]: DEBUG nova.compute.manager [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1455.782190] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-579b41cc-1136-41b2-b2ed-71d9b09033c2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.896458] env[63379]: DEBUG nova.network.neutron [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Successfully updated port: 58a83e9a-4269-4e0e-8eb8-2d5b517e605f {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1455.921346] env[63379]: DEBUG nova.compute.manager [req-1ba69c5c-af7a-4ca6-a4f7-9ca923ecbf47 req-ef41242d-d127-4380-8103-a608cbd7c3d3 service nova] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Received event network-vif-plugged-58a83e9a-4269-4e0e-8eb8-2d5b517e605f {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1455.921346] env[63379]: DEBUG oslo_concurrency.lockutils [req-1ba69c5c-af7a-4ca6-a4f7-9ca923ecbf47 req-ef41242d-d127-4380-8103-a608cbd7c3d3 service nova] Acquiring lock "48c0d20e-adc4-40a9-888c-ffea363f6edb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1455.921346] env[63379]: DEBUG oslo_concurrency.lockutils [req-1ba69c5c-af7a-4ca6-a4f7-9ca923ecbf47 req-ef41242d-d127-4380-8103-a608cbd7c3d3 service nova] Lock "48c0d20e-adc4-40a9-888c-ffea363f6edb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1455.921346] env[63379]: DEBUG oslo_concurrency.lockutils [req-1ba69c5c-af7a-4ca6-a4f7-9ca923ecbf47 req-ef41242d-d127-4380-8103-a608cbd7c3d3 service nova] Lock "48c0d20e-adc4-40a9-888c-ffea363f6edb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1455.921948] env[63379]: DEBUG nova.compute.manager [req-1ba69c5c-af7a-4ca6-a4f7-9ca923ecbf47 req-ef41242d-d127-4380-8103-a608cbd7c3d3 service nova] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] No waiting events found dispatching network-vif-plugged-58a83e9a-4269-4e0e-8eb8-2d5b517e605f {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1455.922165] env[63379]: WARNING nova.compute.manager [req-1ba69c5c-af7a-4ca6-a4f7-9ca923ecbf47 req-ef41242d-d127-4380-8103-a608cbd7c3d3 service nova] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Received unexpected event network-vif-plugged-58a83e9a-4269-4e0e-8eb8-2d5b517e605f for instance with vm_state building and task_state spawning. 
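The "Received event network-vif-plugged-58a83e9a-..." entries are external instance events delivered to Nova's os-server-external-events API once Neutron finishes wiring the port. A hedged sketch of the kind of request that produces them is below; the event name, field names and UUIDs match the log, while the API URL, token handling and use of `requests` are placeholders for whatever the deployment actually uses.

```python
import requests

NOVA_API = "http://controller:8774/v2.1"   # placeholder endpoint
TOKEN = "<keystone-token>"                 # placeholder auth token

payload = {
    "events": [{
        "name": "network-vif-plugged",
        "server_uuid": "48c0d20e-adc4-40a9-888c-ffea363f6edb",
        "tag": "58a83e9a-4269-4e0e-8eb8-2d5b517e605f",  # the Neutron port id
        "status": "completed",
    }]
}

resp = requests.post(
    f"{NOVA_API}/os-server-external-events",
    json=payload,
    headers={"X-Auth-Token": TOKEN},
)
resp.raise_for_status()
```

When such an event arrives before the compute manager has registered a waiter for it, as here while the instance is still in vm_state building / task_state spawning, it is logged as an unexpected event instead of being dispatched.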
[ 1455.996043] env[63379]: DEBUG nova.compute.utils [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1455.996043] env[63379]: DEBUG nova.compute.manager [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1455.996043] env[63379]: DEBUG nova.network.neutron [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1456.005197] env[63379]: DEBUG nova.compute.manager [req-a9250f7e-0ffd-495f-b1c3-4c4d17f062ee req-a0dabbac-6f35-4a87-aed3-bd3fc4065703 service nova] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Received event network-changed-f559ba57-d459-458a-89b0-a79226abd033 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1456.005519] env[63379]: DEBUG nova.compute.manager [req-a9250f7e-0ffd-495f-b1c3-4c4d17f062ee req-a0dabbac-6f35-4a87-aed3-bd3fc4065703 service nova] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Refreshing instance network info cache due to event network-changed-f559ba57-d459-458a-89b0-a79226abd033. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1456.005859] env[63379]: DEBUG oslo_concurrency.lockutils [req-a9250f7e-0ffd-495f-b1c3-4c4d17f062ee req-a0dabbac-6f35-4a87-aed3-bd3fc4065703 service nova] Acquiring lock "refresh_cache-d2f5b406-3d0e-4150-aeaf-7cdacbc12c06" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1456.006121] env[63379]: DEBUG oslo_concurrency.lockutils [req-a9250f7e-0ffd-495f-b1c3-4c4d17f062ee req-a0dabbac-6f35-4a87-aed3-bd3fc4065703 service nova] Acquired lock "refresh_cache-d2f5b406-3d0e-4150-aeaf-7cdacbc12c06" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1456.006400] env[63379]: DEBUG nova.network.neutron [req-a9250f7e-0ffd-495f-b1c3-4c4d17f062ee req-a0dabbac-6f35-4a87-aed3-bd3fc4065703 service nova] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Refreshing network info cache for port f559ba57-d459-458a-89b0-a79226abd033 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1456.084791] env[63379]: DEBUG nova.compute.manager [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Starting instance... 
{{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1456.089084] env[63379]: DEBUG nova.policy [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ae201bf75acb480196f69cddc0f47523', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e17ea72d033544159bbaea7365a7f221', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1456.306049] env[63379]: INFO nova.compute.manager [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Took 41.10 seconds to build instance. [ 1456.416476] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquiring lock "refresh_cache-48c0d20e-adc4-40a9-888c-ffea363f6edb" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1456.416603] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquired lock "refresh_cache-48c0d20e-adc4-40a9-888c-ffea363f6edb" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1456.416779] env[63379]: DEBUG nova.network.neutron [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1456.507151] env[63379]: DEBUG nova.compute.manager [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Start building block device mappings for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1456.528491] env[63379]: DEBUG nova.network.neutron [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Successfully created port: 01134024-43f6-41eb-b222-1e69cef1bfd4 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1456.631176] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1456.631539] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0301f8c7-397f-422b-942d-df0cf07c3312 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.641982] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f077f907-7347-4dbf-be79-b42fcf530f69 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.678213] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-905e6993-aed5-4a22-a86e-30803eb31bc2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.689760] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-114c1b6c-ea74-46ac-99a6-8f617ad34d6d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.713985] env[63379]: DEBUG nova.compute.provider_tree [None req-d79c68bb-b07e-42ab-b6fd-1990e43fd95d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1456.810061] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8047dda1-33b0-4ff4-bc82-d97558f8bb52 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Lock "08465a2c-1ab6-4c53-9b12-3cd51c717b03" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.618s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1456.977808] env[63379]: DEBUG nova.network.neutron [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1457.113689] env[63379]: DEBUG nova.network.neutron [req-a9250f7e-0ffd-495f-b1c3-4c4d17f062ee req-a0dabbac-6f35-4a87-aed3-bd3fc4065703 service nova] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Updated VIF entry in instance network info cache for port f559ba57-d459-458a-89b0-a79226abd033. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1457.113689] env[63379]: DEBUG nova.network.neutron [req-a9250f7e-0ffd-495f-b1c3-4c4d17f062ee req-a0dabbac-6f35-4a87-aed3-bd3fc4065703 service nova] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Updating instance_info_cache with network_info: [{"id": "f559ba57-d459-458a-89b0-a79226abd033", "address": "fa:16:3e:7e:71:a4", "network": {"id": "d52a0ddf-ba39-4c52-9224-86bdc3e637b4", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1358466237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.182", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b1491caa48e4025a443f85088c99b4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc845e3-654b-43c6-acea-dde1084f0ad0", "external-id": "nsx-vlan-transportzone-344", "segmentation_id": 344, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf559ba57-d4", "ovs_interfaceid": "f559ba57-d459-458a-89b0-a79226abd033", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1457.222022] env[63379]: DEBUG nova.scheduler.client.report [None req-d79c68bb-b07e-42ab-b6fd-1990e43fd95d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1457.313613] env[63379]: DEBUG nova.compute.manager [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Starting instance... 
{{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1457.435550] env[63379]: DEBUG nova.network.neutron [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Updating instance_info_cache with network_info: [{"id": "58a83e9a-4269-4e0e-8eb8-2d5b517e605f", "address": "fa:16:3e:15:50:96", "network": {"id": "0f1c71c4-9a40-4d5f-9ce7-b2e38109b1f5", "bridge": "br-int", "label": "tempest-ImagesTestJSON-969152574-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "767980ba969142098ccbdf031f6e62a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0636c3f6-fcb7-4954-ab07-c5cd0dee37b0", "external-id": "nsx-vlan-transportzone-857", "segmentation_id": 857, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58a83e9a-42", "ovs_interfaceid": "58a83e9a-4269-4e0e-8eb8-2d5b517e605f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1457.522694] env[63379]: DEBUG nova.compute.manager [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1457.556705] env[63379]: DEBUG nova.virt.hardware [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1457.557055] env[63379]: DEBUG nova.virt.hardware [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1457.557275] env[63379]: DEBUG nova.virt.hardware [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1457.557501] env[63379]: DEBUG nova.virt.hardware [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1457.557690] env[63379]: DEBUG nova.virt.hardware [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1457.557869] env[63379]: DEBUG nova.virt.hardware [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1457.558132] env[63379]: DEBUG nova.virt.hardware [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1457.558351] env[63379]: DEBUG nova.virt.hardware [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1457.558576] env[63379]: DEBUG nova.virt.hardware [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1457.558789] env[63379]: DEBUG nova.virt.hardware [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1457.559009] env[63379]: DEBUG nova.virt.hardware [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1457.564253] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4f43294-6ace-4143-bfe1-91179eb66a5a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.574604] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b2e083a-72b0-4686-965b-58fb5c35a4c4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.616113] env[63379]: DEBUG oslo_concurrency.lockutils [req-a9250f7e-0ffd-495f-b1c3-4c4d17f062ee req-a0dabbac-6f35-4a87-aed3-bd3fc4065703 service nova] Releasing lock "refresh_cache-d2f5b406-3d0e-4150-aeaf-7cdacbc12c06" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1457.727128] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d79c68bb-b07e-42ab-b6fd-1990e43fd95d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.238s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1457.729677] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7fc23ce3-c8c3-4aa3-a6b1-09b9c2cc579a tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.696s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1457.729968] env[63379]: DEBUG nova.objects.instance [None req-7fc23ce3-c8c3-4aa3-a6b1-09b9c2cc579a tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Lazy-loading 'resources' on Instance uuid aaaf4b06-ef84-41ba-8054-29582854a9f1 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1457.755134] env[63379]: INFO nova.scheduler.client.report [None req-d79c68bb-b07e-42ab-b6fd-1990e43fd95d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Deleted allocations for instance d221329b-eee4-42f5-bb27-cf6af0386c04 [ 1457.837953] env[63379]: DEBUG 
oslo_concurrency.lockutils [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1457.939150] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Releasing lock "refresh_cache-48c0d20e-adc4-40a9-888c-ffea363f6edb" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1457.939794] env[63379]: DEBUG nova.compute.manager [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Instance network_info: |[{"id": "58a83e9a-4269-4e0e-8eb8-2d5b517e605f", "address": "fa:16:3e:15:50:96", "network": {"id": "0f1c71c4-9a40-4d5f-9ce7-b2e38109b1f5", "bridge": "br-int", "label": "tempest-ImagesTestJSON-969152574-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "767980ba969142098ccbdf031f6e62a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0636c3f6-fcb7-4954-ab07-c5cd0dee37b0", "external-id": "nsx-vlan-transportzone-857", "segmentation_id": 857, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58a83e9a-42", "ovs_interfaceid": "58a83e9a-4269-4e0e-8eb8-2d5b517e605f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1457.942018] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:15:50:96', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0636c3f6-fcb7-4954-ab07-c5cd0dee37b0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '58a83e9a-4269-4e0e-8eb8-2d5b517e605f', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1457.950656] env[63379]: DEBUG oslo.service.loopingcall [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1457.951108] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1457.951462] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f7d2327c-7963-4e0e-9ec8-551180fba519 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.971922] env[63379]: DEBUG nova.compute.manager [req-71a127d1-edda-41f0-9f79-f64758e06331 req-097dfd1a-0d17-4316-9373-0726a353136b service nova] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Received event network-changed-58a83e9a-4269-4e0e-8eb8-2d5b517e605f {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1457.972761] env[63379]: DEBUG nova.compute.manager [req-71a127d1-edda-41f0-9f79-f64758e06331 req-097dfd1a-0d17-4316-9373-0726a353136b service nova] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Refreshing instance network info cache due to event network-changed-58a83e9a-4269-4e0e-8eb8-2d5b517e605f. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1457.973411] env[63379]: DEBUG oslo_concurrency.lockutils [req-71a127d1-edda-41f0-9f79-f64758e06331 req-097dfd1a-0d17-4316-9373-0726a353136b service nova] Acquiring lock "refresh_cache-48c0d20e-adc4-40a9-888c-ffea363f6edb" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1457.973773] env[63379]: DEBUG oslo_concurrency.lockutils [req-71a127d1-edda-41f0-9f79-f64758e06331 req-097dfd1a-0d17-4316-9373-0726a353136b service nova] Acquired lock "refresh_cache-48c0d20e-adc4-40a9-888c-ffea363f6edb" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1457.976349] env[63379]: DEBUG nova.network.neutron [req-71a127d1-edda-41f0-9f79-f64758e06331 req-097dfd1a-0d17-4316-9373-0726a353136b service nova] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Refreshing network info cache for port 58a83e9a-4269-4e0e-8eb8-2d5b517e605f {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1457.984245] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1457.984245] env[63379]: value = "task-1779178" [ 1457.984245] env[63379]: _type = "Task" [ 1457.984245] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1457.993279] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779178, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.267335] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d79c68bb-b07e-42ab-b6fd-1990e43fd95d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Lock "d221329b-eee4-42f5-bb27-cf6af0386c04" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.957s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1458.501377] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779178, 'name': CreateVM_Task, 'duration_secs': 0.488632} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1458.501377] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1458.502535] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1458.507098] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1458.507098] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1458.507098] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c12ac8c7-b544-4235-a15a-8ad74ff821ea {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.517164] env[63379]: DEBUG oslo_vmware.api [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1458.517164] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52452d68-0eb4-ca74-bd03-7d95446656f1" [ 1458.517164] env[63379]: _type = "Task" [ 1458.517164] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.536057] env[63379]: DEBUG oslo_vmware.api [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52452d68-0eb4-ca74-bd03-7d95446656f1, 'name': SearchDatastore_Task, 'duration_secs': 0.014337} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1458.536057] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1458.536057] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1458.536057] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1458.536057] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1458.536057] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1458.540020] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f18203ac-a84d-4461-9d5e-131b1dd66f93 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.549420] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1458.549420] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1458.549420] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15b200e3-8dba-4ab5-ae00-15fbd1f0f120 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.556560] env[63379]: DEBUG oslo_vmware.api [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1458.556560] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]528f3458-a798-87b3-4453-b889c705c60e" [ 1458.556560] env[63379]: _type = "Task" [ 1458.556560] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.576144] env[63379]: DEBUG oslo_vmware.api [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]528f3458-a798-87b3-4453-b889c705c60e, 'name': SearchDatastore_Task, 'duration_secs': 0.014557} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1458.578250] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-183b814d-2ae3-4f29-951d-0e9c06176d08 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.593027] env[63379]: DEBUG oslo_vmware.api [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1458.593027] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]521f020e-a5c9-af55-e796-0d6c7673568d" [ 1458.593027] env[63379]: _type = "Task" [ 1458.593027] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.600963] env[63379]: DEBUG oslo_vmware.api [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]521f020e-a5c9-af55-e796-0d6c7673568d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.682863] env[63379]: DEBUG nova.network.neutron [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Successfully updated port: 01134024-43f6-41eb-b222-1e69cef1bfd4 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1458.806764] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4592759f-7cf4-4fbb-8633-e1bcbb49e0b6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.819102] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de166f0c-554c-4f8b-86d4-f522a69242d0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.854964] env[63379]: DEBUG nova.network.neutron [req-71a127d1-edda-41f0-9f79-f64758e06331 req-097dfd1a-0d17-4316-9373-0726a353136b service nova] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Updated VIF entry in instance network info cache for port 58a83e9a-4269-4e0e-8eb8-2d5b517e605f. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1458.855459] env[63379]: DEBUG nova.network.neutron [req-71a127d1-edda-41f0-9f79-f64758e06331 req-097dfd1a-0d17-4316-9373-0726a353136b service nova] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Updating instance_info_cache with network_info: [{"id": "58a83e9a-4269-4e0e-8eb8-2d5b517e605f", "address": "fa:16:3e:15:50:96", "network": {"id": "0f1c71c4-9a40-4d5f-9ce7-b2e38109b1f5", "bridge": "br-int", "label": "tempest-ImagesTestJSON-969152574-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "767980ba969142098ccbdf031f6e62a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0636c3f6-fcb7-4954-ab07-c5cd0dee37b0", "external-id": "nsx-vlan-transportzone-857", "segmentation_id": 857, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58a83e9a-42", "ovs_interfaceid": "58a83e9a-4269-4e0e-8eb8-2d5b517e605f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1458.858028] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-814930b2-007c-4fd1-8d29-b68f539b4823 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.867723] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8fb53da-360b-44e8-8dcc-8fa2d0a4153e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.886145] env[63379]: DEBUG nova.compute.provider_tree 
[None req-7fc23ce3-c8c3-4aa3-a6b1-09b9c2cc579a tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1459.103316] env[63379]: DEBUG oslo_vmware.api [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]521f020e-a5c9-af55-e796-0d6c7673568d, 'name': SearchDatastore_Task, 'duration_secs': 0.015399} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.103871] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1459.104340] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 48c0d20e-adc4-40a9-888c-ffea363f6edb/48c0d20e-adc4-40a9-888c-ffea363f6edb.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1459.104702] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c3ba6675-08cf-4dfd-9171-859d6f82a45b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.114889] env[63379]: DEBUG oslo_vmware.api [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1459.114889] env[63379]: value = "task-1779179" [ 1459.114889] env[63379]: _type = "Task" [ 1459.114889] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.127435] env[63379]: DEBUG oslo_vmware.api [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779179, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.191775] env[63379]: DEBUG oslo_concurrency.lockutils [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Acquiring lock "refresh_cache-915aec20-5765-4aad-8b0f-f2d71b7d6428" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1459.191775] env[63379]: DEBUG oslo_concurrency.lockutils [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Acquired lock "refresh_cache-915aec20-5765-4aad-8b0f-f2d71b7d6428" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1459.191775] env[63379]: DEBUG nova.network.neutron [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1459.362267] env[63379]: DEBUG oslo_concurrency.lockutils [req-71a127d1-edda-41f0-9f79-f64758e06331 req-097dfd1a-0d17-4316-9373-0726a353136b service nova] Releasing lock "refresh_cache-48c0d20e-adc4-40a9-888c-ffea363f6edb" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1459.394489] env[63379]: DEBUG nova.scheduler.client.report [None req-7fc23ce3-c8c3-4aa3-a6b1-09b9c2cc579a tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1459.629782] env[63379]: DEBUG oslo_vmware.api [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779179, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.757329] env[63379]: DEBUG nova.network.neutron [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1459.779407] env[63379]: DEBUG oslo_vmware.rw_handles [None req-91f28ea9-d272-430a-b17f-3bca3f960fb4 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b329e4-7901-7537-3458-06af5f4f07b0/disk-0.vmdk. 
{{(pid=63379) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1459.780628] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b92821a8-79e8-485a-aa22-fc91d71b6500 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.793673] env[63379]: DEBUG oslo_vmware.rw_handles [None req-91f28ea9-d272-430a-b17f-3bca3f960fb4 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b329e4-7901-7537-3458-06af5f4f07b0/disk-0.vmdk is in state: ready. {{(pid=63379) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1459.793872] env[63379]: ERROR oslo_vmware.rw_handles [None req-91f28ea9-d272-430a-b17f-3bca3f960fb4 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b329e4-7901-7537-3458-06af5f4f07b0/disk-0.vmdk due to incomplete transfer. [ 1459.794150] env[63379]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-e15a14d4-f30c-4747-a71d-a84d37156d07 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.807694] env[63379]: DEBUG oslo_vmware.rw_handles [None req-91f28ea9-d272-430a-b17f-3bca3f960fb4 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b329e4-7901-7537-3458-06af5f4f07b0/disk-0.vmdk. {{(pid=63379) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1459.807907] env[63379]: DEBUG nova.virt.vmwareapi.images [None req-91f28ea9-d272-430a-b17f-3bca3f960fb4 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Uploaded image f2b0f428-9e21-4972-9c70-84b0e00c7270 to the Glance image server {{(pid=63379) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1459.810206] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-91f28ea9-d272-430a-b17f-3bca3f960fb4 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Destroying the VM {{(pid=63379) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1459.810709] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-27bf01d9-13fa-4f43-ac3e-eb0df832a74b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.820607] env[63379]: DEBUG oslo_vmware.api [None req-91f28ea9-d272-430a-b17f-3bca3f960fb4 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Waiting for the task: (returnval){ [ 1459.820607] env[63379]: value = "task-1779180" [ 1459.820607] env[63379]: _type = "Task" [ 1459.820607] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.830791] env[63379]: DEBUG oslo_vmware.api [None req-91f28ea9-d272-430a-b17f-3bca3f960fb4 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': task-1779180, 'name': Destroy_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.897133] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7fc23ce3-c8c3-4aa3-a6b1-09b9c2cc579a tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.167s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1459.899628] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7f9f28cb-7c5b-4e6d-921c-d0df3998b98d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.373s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1459.899877] env[63379]: DEBUG nova.objects.instance [None req-7f9f28cb-7c5b-4e6d-921c-d0df3998b98d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Lazy-loading 'resources' on Instance uuid 571bb238-9cf3-475e-b596-a9609acc8696 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1459.936503] env[63379]: INFO nova.scheduler.client.report [None req-7fc23ce3-c8c3-4aa3-a6b1-09b9c2cc579a tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Deleted allocations for instance aaaf4b06-ef84-41ba-8054-29582854a9f1 [ 1459.995390] env[63379]: DEBUG nova.network.neutron [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Updating instance_info_cache with network_info: [{"id": "01134024-43f6-41eb-b222-1e69cef1bfd4", "address": "fa:16:3e:25:e3:d2", "network": {"id": "ddbc3cba-6a78-4455-89dd-2b790241675e", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1612069245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e17ea72d033544159bbaea7365a7f221", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "76e60ff4-204c-4f48-bd0e-2d5fa0a812ef", "external-id": "nsx-vlan-transportzone-854", "segmentation_id": 854, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01134024-43", "ovs_interfaceid": "01134024-43f6-41eb-b222-1e69cef1bfd4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 
1460.129981] env[63379]: DEBUG oslo_vmware.api [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779179, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.681835} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1460.129981] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 48c0d20e-adc4-40a9-888c-ffea363f6edb/48c0d20e-adc4-40a9-888c-ffea363f6edb.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1460.129981] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1460.129981] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a4185bee-d4a3-4067-855f-e74fe40f3d96 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.138444] env[63379]: DEBUG oslo_vmware.api [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1460.138444] env[63379]: value = "task-1779181" [ 1460.138444] env[63379]: _type = "Task" [ 1460.138444] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.148711] env[63379]: DEBUG oslo_vmware.api [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779181, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.154036] env[63379]: DEBUG oslo_concurrency.lockutils [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquiring lock "758ade2c-7f75-4907-95d5-681d5792ae31" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1460.154036] env[63379]: DEBUG oslo_concurrency.lockutils [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Lock "758ade2c-7f75-4907-95d5-681d5792ae31" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1460.163599] env[63379]: DEBUG nova.compute.manager [req-3670eafa-2821-4618-a708-88f664cf6cf3 req-f96737fa-d599-4336-94e2-218f6e337781 service nova] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Received event network-vif-plugged-01134024-43f6-41eb-b222-1e69cef1bfd4 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1460.164536] env[63379]: DEBUG oslo_concurrency.lockutils [req-3670eafa-2821-4618-a708-88f664cf6cf3 req-f96737fa-d599-4336-94e2-218f6e337781 service nova] Acquiring lock "915aec20-5765-4aad-8b0f-f2d71b7d6428-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1460.165117] env[63379]: DEBUG oslo_concurrency.lockutils [req-3670eafa-2821-4618-a708-88f664cf6cf3 req-f96737fa-d599-4336-94e2-218f6e337781 service nova] Lock "915aec20-5765-4aad-8b0f-f2d71b7d6428-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1460.167965] env[63379]: DEBUG oslo_concurrency.lockutils [req-3670eafa-2821-4618-a708-88f664cf6cf3 req-f96737fa-d599-4336-94e2-218f6e337781 service nova] Lock "915aec20-5765-4aad-8b0f-f2d71b7d6428-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1460.167965] env[63379]: DEBUG nova.compute.manager [req-3670eafa-2821-4618-a708-88f664cf6cf3 req-f96737fa-d599-4336-94e2-218f6e337781 service nova] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] No waiting events found dispatching network-vif-plugged-01134024-43f6-41eb-b222-1e69cef1bfd4 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1460.167965] env[63379]: WARNING nova.compute.manager [req-3670eafa-2821-4618-a708-88f664cf6cf3 req-f96737fa-d599-4336-94e2-218f6e337781 service nova] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Received unexpected event network-vif-plugged-01134024-43f6-41eb-b222-1e69cef1bfd4 for instance with vm_state building and task_state spawning. 
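The CopyVirtualDisk_Task and ExtendVirtualDisk_Task exchanges above follow oslo.vmware's start-task-then-poll pattern: the driver invokes a vCenter task and then blocks in wait_for_task(), which emits the recurring "Task: {...} progress is N%" lines. A minimal sketch of that polling side, assuming an established oslo.vmware VMwareAPISession; the endpoint, credentials and the reuse of a task ID from this log are placeholders for illustration only:

    from oslo_vmware import api
    from oslo_vmware import vim_util

    # Placeholder endpoint and credentials; the session in this log was
    # created against a real vCenter by VMwareAPISession._create_session.
    session = api.VMwareAPISession(
        "vcenter.example.org", "user", "password",
        api_retry_count=10, task_poll_interval=0.5)

    # Build a managed-object reference for a task (e.g. task-1779181 above)
    # and block until it finishes. wait_for_task() polls the task info on the
    # configured interval, logging progress as it goes, and returns the task
    # result or raises if the task errored.
    task_ref = vim_util.get_moref("task-1779181", "Task")
    session.wait_for_task(task_ref)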
[ 1460.167965] env[63379]: DEBUG nova.compute.manager [req-3670eafa-2821-4618-a708-88f664cf6cf3 req-f96737fa-d599-4336-94e2-218f6e337781 service nova] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Received event network-changed-b54d5849-e50d-4f42-922f-70d18e44b988 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1460.167965] env[63379]: DEBUG nova.compute.manager [req-3670eafa-2821-4618-a708-88f664cf6cf3 req-f96737fa-d599-4336-94e2-218f6e337781 service nova] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Refreshing instance network info cache due to event network-changed-b54d5849-e50d-4f42-922f-70d18e44b988. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1460.167965] env[63379]: DEBUG oslo_concurrency.lockutils [req-3670eafa-2821-4618-a708-88f664cf6cf3 req-f96737fa-d599-4336-94e2-218f6e337781 service nova] Acquiring lock "refresh_cache-08465a2c-1ab6-4c53-9b12-3cd51c717b03" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1460.167965] env[63379]: DEBUG oslo_concurrency.lockutils [req-3670eafa-2821-4618-a708-88f664cf6cf3 req-f96737fa-d599-4336-94e2-218f6e337781 service nova] Acquired lock "refresh_cache-08465a2c-1ab6-4c53-9b12-3cd51c717b03" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1460.167965] env[63379]: DEBUG nova.network.neutron [req-3670eafa-2821-4618-a708-88f664cf6cf3 req-f96737fa-d599-4336-94e2-218f6e337781 service nova] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Refreshing network info cache for port b54d5849-e50d-4f42-922f-70d18e44b988 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1460.337539] env[63379]: DEBUG oslo_vmware.api [None req-91f28ea9-d272-430a-b17f-3bca3f960fb4 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': task-1779180, 'name': Destroy_Task, 'duration_secs': 0.453586} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1460.338237] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-91f28ea9-d272-430a-b17f-3bca3f960fb4 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Destroyed the VM [ 1460.338980] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-91f28ea9-d272-430a-b17f-3bca3f960fb4 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Deleting Snapshot of the VM instance {{(pid=63379) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1460.339404] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-81fc0662-6bae-44c7-a033-0555866da4e0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.349141] env[63379]: DEBUG oslo_vmware.api [None req-91f28ea9-d272-430a-b17f-3bca3f960fb4 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Waiting for the task: (returnval){ [ 1460.349141] env[63379]: value = "task-1779182" [ 1460.349141] env[63379]: _type = "Task" [ 1460.349141] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.361792] env[63379]: DEBUG oslo_vmware.api [None req-91f28ea9-d272-430a-b17f-3bca3f960fb4 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': task-1779182, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.455323] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7fc23ce3-c8c3-4aa3-a6b1-09b9c2cc579a tempest-TenantUsagesTestJSON-1255117023 tempest-TenantUsagesTestJSON-1255117023-project-member] Lock "aaaf4b06-ef84-41ba-8054-29582854a9f1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.033s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1460.501205] env[63379]: DEBUG oslo_concurrency.lockutils [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Releasing lock "refresh_cache-915aec20-5765-4aad-8b0f-f2d71b7d6428" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1460.501540] env[63379]: DEBUG nova.compute.manager [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Instance network_info: |[{"id": "01134024-43f6-41eb-b222-1e69cef1bfd4", "address": "fa:16:3e:25:e3:d2", "network": {"id": "ddbc3cba-6a78-4455-89dd-2b790241675e", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1612069245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e17ea72d033544159bbaea7365a7f221", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "76e60ff4-204c-4f48-bd0e-2d5fa0a812ef", "external-id": "nsx-vlan-transportzone-854", "segmentation_id": 854, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01134024-43", "ovs_interfaceid": "01134024-43f6-41eb-b222-1e69cef1bfd4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1460.502244] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:25:e3:d2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '76e60ff4-204c-4f48-bd0e-2d5fa0a812ef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '01134024-43f6-41eb-b222-1e69cef1bfd4', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1460.510080] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Creating folder: Project (e17ea72d033544159bbaea7365a7f221). Parent ref: group-v369214. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1460.510736] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-724b2e78-5157-46b0-90b7-6e49480943bd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.527699] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquiring lock "0324da80-b97c-4dc9-9083-199fbda60341" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1460.527899] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Lock "0324da80-b97c-4dc9-9083-199fbda60341" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1460.528152] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Created folder: Project (e17ea72d033544159bbaea7365a7f221) in parent group-v369214. [ 1460.528336] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Creating folder: Instances. Parent ref: group-v369302. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1460.528577] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ccb6a894-df2c-4506-861d-1ad0a2aa65f4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.543867] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Created folder: Instances in parent group-v369302. [ 1460.544114] env[63379]: DEBUG oslo.service.loopingcall [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1460.544324] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1460.544602] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8eff0f45-bd64-4a0a-9e65-fbf187ac9255 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.568683] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1460.568683] env[63379]: value = "task-1779185" [ 1460.568683] env[63379]: _type = "Task" [ 1460.568683] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.585079] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779185, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.650195] env[63379]: DEBUG oslo_vmware.api [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779181, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.116103} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1460.650648] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1460.651324] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-515e434c-bb7e-4c2a-8101-58a2228deb35 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.676299] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Reconfiguring VM instance instance-0000001c to attach disk [datastore1] 48c0d20e-adc4-40a9-888c-ffea363f6edb/48c0d20e-adc4-40a9-888c-ffea363f6edb.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1460.681113] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f691a6b4-e671-479f-99a9-0d607a8af476 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.702657] env[63379]: DEBUG oslo_vmware.api [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1460.702657] env[63379]: value = "task-1779186" [ 1460.702657] env[63379]: _type = "Task" [ 1460.702657] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.713291] env[63379]: DEBUG oslo_vmware.api [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779186, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.861278] env[63379]: DEBUG oslo_vmware.api [None req-91f28ea9-d272-430a-b17f-3bca3f960fb4 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': task-1779182, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.928992] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07331eea-54f4-48a6-9fab-b52eaee81fb8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.936798] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3260b6e2-1111-42f6-a4b7-4aafb9be5a89 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.972290] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c921c38-57f1-4ee1-a553-7d4e8c3e5f28 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.980506] env[63379]: DEBUG nova.network.neutron [req-3670eafa-2821-4618-a708-88f664cf6cf3 req-f96737fa-d599-4336-94e2-218f6e337781 service nova] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Updated VIF entry in instance network info cache for port b54d5849-e50d-4f42-922f-70d18e44b988. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1460.980864] env[63379]: DEBUG nova.network.neutron [req-3670eafa-2821-4618-a708-88f664cf6cf3 req-f96737fa-d599-4336-94e2-218f6e337781 service nova] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Updating instance_info_cache with network_info: [{"id": "b54d5849-e50d-4f42-922f-70d18e44b988", "address": "fa:16:3e:59:4a:db", "network": {"id": "46d4434c-021e-45e1-8971-650440e8abdc", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-551179399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.202", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6d10c9c1e964532945ff3157ebaaa4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ff3ecd2f-0b10-4faf-a512-fd7a20c28df1", "external-id": "nsx-vlan-transportzone-291", "segmentation_id": 291, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb54d5849-e5", "ovs_interfaceid": "b54d5849-e50d-4f42-922f-70d18e44b988", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1460.983030] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07bfca00-58d9-4308-b683-a30d5b0463c0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.998337] env[63379]: DEBUG nova.compute.provider_tree [None req-7f9f28cb-7c5b-4e6d-921c-d0df3998b98d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1461.082094] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779185, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.212984] env[63379]: DEBUG oslo_vmware.api [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779186, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.358814] env[63379]: DEBUG oslo_vmware.api [None req-91f28ea9-d272-430a-b17f-3bca3f960fb4 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': task-1779182, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.488905] env[63379]: DEBUG oslo_concurrency.lockutils [req-3670eafa-2821-4618-a708-88f664cf6cf3 req-f96737fa-d599-4336-94e2-218f6e337781 service nova] Releasing lock "refresh_cache-08465a2c-1ab6-4c53-9b12-3cd51c717b03" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1461.489450] env[63379]: DEBUG nova.compute.manager [req-3670eafa-2821-4618-a708-88f664cf6cf3 req-f96737fa-d599-4336-94e2-218f6e337781 service nova] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Received event network-changed-01134024-43f6-41eb-b222-1e69cef1bfd4 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1461.489712] env[63379]: DEBUG nova.compute.manager [req-3670eafa-2821-4618-a708-88f664cf6cf3 req-f96737fa-d599-4336-94e2-218f6e337781 service nova] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Refreshing instance network info cache due to event network-changed-01134024-43f6-41eb-b222-1e69cef1bfd4. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1461.490026] env[63379]: DEBUG oslo_concurrency.lockutils [req-3670eafa-2821-4618-a708-88f664cf6cf3 req-f96737fa-d599-4336-94e2-218f6e337781 service nova] Acquiring lock "refresh_cache-915aec20-5765-4aad-8b0f-f2d71b7d6428" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1461.490248] env[63379]: DEBUG oslo_concurrency.lockutils [req-3670eafa-2821-4618-a708-88f664cf6cf3 req-f96737fa-d599-4336-94e2-218f6e337781 service nova] Acquired lock "refresh_cache-915aec20-5765-4aad-8b0f-f2d71b7d6428" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1461.490478] env[63379]: DEBUG nova.network.neutron [req-3670eafa-2821-4618-a708-88f664cf6cf3 req-f96737fa-d599-4336-94e2-218f6e337781 service nova] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Refreshing network info cache for port 01134024-43f6-41eb-b222-1e69cef1bfd4 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1461.501324] env[63379]: DEBUG nova.scheduler.client.report [None req-7f9f28cb-7c5b-4e6d-921c-d0df3998b98d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1461.582014] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779185, 'name': CreateVM_Task, 'duration_secs': 0.669048} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.582218] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1461.582979] env[63379]: DEBUG oslo_concurrency.lockutils [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1461.583179] env[63379]: DEBUG oslo_concurrency.lockutils [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1461.583615] env[63379]: DEBUG oslo_concurrency.lockutils [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1461.583803] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec359b44-5131-410a-b31b-fe998e473939 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.590050] env[63379]: DEBUG oslo_vmware.api [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Waiting for the task: (returnval){ [ 1461.590050] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]526b618b-f570-7512-ef5b-71d6660260b3" [ 1461.590050] env[63379]: _type = "Task" [ 1461.590050] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.599857] env[63379]: DEBUG oslo_vmware.api [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]526b618b-f570-7512-ef5b-71d6660260b3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.714283] env[63379]: DEBUG oslo_vmware.api [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779186, 'name': ReconfigVM_Task, 'duration_secs': 0.625495} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.714604] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Reconfigured VM instance instance-0000001c to attach disk [datastore1] 48c0d20e-adc4-40a9-888c-ffea363f6edb/48c0d20e-adc4-40a9-888c-ffea363f6edb.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1461.715264] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5b5fe88d-625c-4fd4-88d4-5ee49d2f5c15 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.723405] env[63379]: DEBUG oslo_vmware.api [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1461.723405] env[63379]: value = "task-1779187" [ 1461.723405] env[63379]: _type = "Task" [ 1461.723405] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.732982] env[63379]: DEBUG oslo_vmware.api [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779187, 'name': Rename_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.860633] env[63379]: DEBUG oslo_vmware.api [None req-91f28ea9-d272-430a-b17f-3bca3f960fb4 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': task-1779182, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.006784] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7f9f28cb-7c5b-4e6d-921c-d0df3998b98d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.107s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1462.009634] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 35.064s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1462.041620] env[63379]: INFO nova.scheduler.client.report [None req-7f9f28cb-7c5b-4e6d-921c-d0df3998b98d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Deleted allocations for instance 571bb238-9cf3-475e-b596-a9609acc8696 [ 1462.100899] env[63379]: DEBUG oslo_vmware.api [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]526b618b-f570-7512-ef5b-71d6660260b3, 'name': SearchDatastore_Task, 'duration_secs': 0.020886} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.103367] env[63379]: DEBUG oslo_concurrency.lockutils [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1462.103627] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1462.103866] env[63379]: DEBUG oslo_concurrency.lockutils [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1462.104088] env[63379]: DEBUG oslo_concurrency.lockutils [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1462.104209] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1462.104578] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0efbe6bf-d45e-4609-8605-4cf123e41515 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.116169] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1462.116376] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1462.117178] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0b1c3a5-e90d-45d8-97c5-24d48bdf83e5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.126033] env[63379]: DEBUG oslo_vmware.api [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Waiting for the task: (returnval){ [ 1462.126033] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52fb6c10-860c-2976-dd81-60995790d7c2" [ 1462.126033] env[63379]: _type = "Task" [ 1462.126033] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.134751] env[63379]: DEBUG oslo_vmware.api [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52fb6c10-860c-2976-dd81-60995790d7c2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.236704] env[63379]: DEBUG oslo_vmware.api [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779187, 'name': Rename_Task, 'duration_secs': 0.173811} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.237052] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1462.237417] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-91f5d01c-1609-462d-9248-fea414161cf2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.246991] env[63379]: DEBUG oslo_vmware.api [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1462.246991] env[63379]: value = "task-1779188" [ 1462.246991] env[63379]: _type = "Task" [ 1462.246991] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.264614] env[63379]: DEBUG oslo_vmware.api [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779188, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.340305] env[63379]: DEBUG nova.network.neutron [req-3670eafa-2821-4618-a708-88f664cf6cf3 req-f96737fa-d599-4336-94e2-218f6e337781 service nova] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Updated VIF entry in instance network info cache for port 01134024-43f6-41eb-b222-1e69cef1bfd4. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1462.340678] env[63379]: DEBUG nova.network.neutron [req-3670eafa-2821-4618-a708-88f664cf6cf3 req-f96737fa-d599-4336-94e2-218f6e337781 service nova] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Updating instance_info_cache with network_info: [{"id": "01134024-43f6-41eb-b222-1e69cef1bfd4", "address": "fa:16:3e:25:e3:d2", "network": {"id": "ddbc3cba-6a78-4455-89dd-2b790241675e", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1612069245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e17ea72d033544159bbaea7365a7f221", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "76e60ff4-204c-4f48-bd0e-2d5fa0a812ef", "external-id": "nsx-vlan-transportzone-854", "segmentation_id": 854, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01134024-43", "ovs_interfaceid": "01134024-43f6-41eb-b222-1e69cef1bfd4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1462.362368] env[63379]: DEBUG oslo_vmware.api [None req-91f28ea9-d272-430a-b17f-3bca3f960fb4 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': task-1779182, 'name': RemoveSnapshot_Task, 'duration_secs': 1.638519} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.362368] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-91f28ea9-d272-430a-b17f-3bca3f960fb4 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Deleted Snapshot of the VM instance {{(pid=63379) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1462.362513] env[63379]: INFO nova.compute.manager [None req-91f28ea9-d272-430a-b17f-3bca3f960fb4 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Took 15.18 seconds to snapshot the instance on the hypervisor. 
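
Annotation: the instance_info_cache updates above print Nova's network_info model as a JSON-like list of VIF dicts (id, address, devname, type, network.subnets[].ips[] with nested floating_ips, and so on). Below is a small sketch of pulling the commonly needed addressing details out of that structure; the field names are taken from the logged output, and this is a plain-dict walker under that assumed shape, not the nova.network.model API.

def summarize_network_info(network_info):
    # network_info: list of VIF dicts shaped like the cache-update entries above.
    summary = []
    for vif in network_info:
        fixed_ips = []
        floating_ips = []
        for subnet in vif.get('network', {}).get('subnets', []):
            for ip in subnet.get('ips', []):
                fixed_ips.append(ip.get('address'))
                for fip in ip.get('floating_ips', []):
                    floating_ips.append(fip.get('address'))
        summary.append({
            'port_id': vif.get('id'),
            'mac': vif.get('address'),
            'devname': vif.get('devname'),
            'vif_type': vif.get('type'),
            'fixed_ips': fixed_ips,
            'floating_ips': floating_ips,
        })
    return summary


# Example with the VIF logged for instance 08465a2c-... above:
#   summarize_network_info([{'id': 'b54d5849-e50d-4f42-922f-70d18e44b988',
#                            'address': 'fa:16:3e:59:4a:db',
#                            'devname': 'tapb54d5849-e5', 'type': 'ovs',
#                            'network': {'subnets': [{'ips': [{'address': '192.168.128.7',
#                                        'floating_ips': [{'address': '10.180.180.202'}]}]}]}}])
# returns one entry with fixed_ips=['192.168.128.7'] and floating_ips=['10.180.180.202'].
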
[ 1462.551270] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7f9f28cb-7c5b-4e6d-921c-d0df3998b98d tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Lock "571bb238-9cf3-475e-b596-a9609acc8696" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.889s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1462.642549] env[63379]: DEBUG oslo_vmware.api [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52fb6c10-860c-2976-dd81-60995790d7c2, 'name': SearchDatastore_Task, 'duration_secs': 0.013956} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.646014] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3aa904e6-d306-4a26-91e8-6e4969ce4e0e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.650623] env[63379]: DEBUG oslo_vmware.api [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Waiting for the task: (returnval){ [ 1462.650623] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c7f3fb-1239-4c14-60bc-6a3e78a4d263" [ 1462.650623] env[63379]: _type = "Task" [ 1462.650623] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.659962] env[63379]: DEBUG oslo_vmware.api [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c7f3fb-1239-4c14-60bc-6a3e78a4d263, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.758435] env[63379]: DEBUG oslo_vmware.api [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779188, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.843939] env[63379]: DEBUG oslo_concurrency.lockutils [req-3670eafa-2821-4618-a708-88f664cf6cf3 req-f96737fa-d599-4336-94e2-218f6e337781 service nova] Releasing lock "refresh_cache-915aec20-5765-4aad-8b0f-f2d71b7d6428" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1463.030695] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Applying migration context for instance aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae as it has an incoming, in-progress migration 786df903-0cb0-4f51-a75d-824fa35e0a15. 
Migration status is confirming {{(pid=63379) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1463.032371] env[63379]: INFO nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Updating resource usage from migration 786df903-0cb0-4f51-a75d-824fa35e0a15 [ 1463.053360] env[63379]: WARNING nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance efc5b3b6-bed4-484c-8a0c-65810747382d is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1463.053532] env[63379]: WARNING nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 0edadcca-042e-440b-985b-6338e20265fa is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1463.053675] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 55fb6899-0321-4bf2-bf3f-2e87dd479433 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1463.053990] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance d47be684-6cd8-45c6-8c6a-9a6db0390f97 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1463.053990] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance bf0dd3cf-684c-4378-a89c-5b9f16df062d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1463.054106] env[63379]: WARNING nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 25090d85-cd10-44fc-aa9d-071ada14f249 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1463.054188] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1463.054431] env[63379]: WARNING nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 6b4e80fc-582f-432b-aa99-ec133127578e is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
Skipping heal of allocation because we do not know what to do. [ 1463.054431] env[63379]: WARNING nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance de671ba9-0d86-4f89-a6bd-ecea9ad0ba85 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1463.054592] env[63379]: WARNING nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance c999d64e-3f5b-4854-8b92-6d0d17f49dd7 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1463.054666] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 41952d7b-ce23-4e9b-8843-bbac1d3099c1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1463.054791] env[63379]: WARNING nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1463.054907] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Migration 786df903-0cb0-4f51-a75d-824fa35e0a15 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1463.055031] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1463.055147] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 941ac23c-6aa9-4ed1-840a-326423b7cbc0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1463.055258] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 76731b1b-af66-441b-8fe4-d5d7e7faf3ca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1463.055366] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance d2f5b406-3d0e-4150-aeaf-7cdacbc12c06 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1463.055553] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 08465a2c-1ab6-4c53-9b12-3cd51c717b03 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1463.055695] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 48c0d20e-adc4-40a9-888c-ffea363f6edb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1463.055817] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 915aec20-5765-4aad-8b0f-f2d71b7d6428 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1463.162257] env[63379]: DEBUG oslo_vmware.api [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c7f3fb-1239-4c14-60bc-6a3e78a4d263, 'name': SearchDatastore_Task, 'duration_secs': 0.01082} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.162555] env[63379]: DEBUG oslo_concurrency.lockutils [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1463.162816] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 915aec20-5765-4aad-8b0f-f2d71b7d6428/915aec20-5765-4aad-8b0f-f2d71b7d6428.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1463.163090] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-667a0d41-0877-4475-a49b-e9b8986870c2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.172036] env[63379]: DEBUG oslo_vmware.api [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Waiting for the task: (returnval){ [ 1463.172036] env[63379]: value = "task-1779189" [ 1463.172036] env[63379]: _type = "Task" [ 1463.172036] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.192906] env[63379]: DEBUG oslo_vmware.api [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': task-1779189, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.260311] env[63379]: DEBUG oslo_vmware.api [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779188, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.551880] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquiring lock "2f98800d-800f-4ad7-bd65-f12879f02ce5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1463.552290] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Lock "2f98800d-800f-4ad7-bd65-f12879f02ce5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1463.558715] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 04234ba7-24a3-48e5-9f62-6f4dddd0054a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1463.589156] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquiring lock "e838f54f-99f2-4f39-a9d2-725be8a5b3ce" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1463.589156] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Lock "e838f54f-99f2-4f39-a9d2-725be8a5b3ce" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1463.684591] env[63379]: DEBUG oslo_vmware.api [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': task-1779189, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.760839] env[63379]: DEBUG oslo_vmware.api [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779188, 'name': PowerOnVM_Task, 'duration_secs': 1.098896} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.761155] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1463.761458] env[63379]: INFO nova.compute.manager [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Took 8.95 seconds to spawn the instance on the hypervisor. [ 1463.761677] env[63379]: DEBUG nova.compute.manager [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1463.762471] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e3586cf-52a9-490f-ab84-71657a6c6e56 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.065219] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 90f0c97d-695b-4975-8ab9-4e77a9175da1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1464.193727] env[63379]: DEBUG oslo_vmware.api [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': task-1779189, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.532226} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.194018] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 915aec20-5765-4aad-8b0f-f2d71b7d6428/915aec20-5765-4aad-8b0f-f2d71b7d6428.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1464.194242] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1464.194549] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-229f2d91-9cbf-4ac0-91d3-3326419ed82b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.201482] env[63379]: DEBUG oslo_vmware.api [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Waiting for the task: (returnval){ [ 1464.201482] env[63379]: value = "task-1779190" [ 1464.201482] env[63379]: _type = "Task" [ 1464.201482] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.210446] env[63379]: DEBUG oslo_vmware.api [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': task-1779190, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.279588] env[63379]: INFO nova.compute.manager [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Took 44.61 seconds to build instance. [ 1464.504496] env[63379]: DEBUG nova.compute.manager [None req-3e422b01-133e-48f2-94ba-62dcf2caeb2b tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1464.505579] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3405bb9f-2c7e-4629-ac2e-3d82e095e416 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.568445] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance bc7baa1a-f65d-41d4-ad86-de041fbb2306 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1464.712307] env[63379]: DEBUG oslo_vmware.api [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': task-1779190, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066019} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.712586] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1464.713385] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90ffb9a7-e797-45c1-af7e-7cef18acb67d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.736296] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Reconfiguring VM instance instance-0000001d to attach disk [datastore1] 915aec20-5765-4aad-8b0f-f2d71b7d6428/915aec20-5765-4aad-8b0f-f2d71b7d6428.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1464.736601] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f98b539f-3196-4a77-a032-0fe2ffdc9780 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.757921] env[63379]: DEBUG oslo_vmware.api [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Waiting for the task: (returnval){ [ 1464.757921] env[63379]: value = "task-1779191" [ 1464.757921] env[63379]: _type = "Task" [ 1464.757921] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.766058] env[63379]: DEBUG oslo_vmware.api [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': task-1779191, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.781862] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c8eef823-df62-4ad2-a103-901b0156607e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "48c0d20e-adc4-40a9-888c-ffea363f6edb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.777s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1465.018540] env[63379]: INFO nova.compute.manager [None req-3e422b01-133e-48f2-94ba-62dcf2caeb2b tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] instance snapshotting [ 1465.023202] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d368c0a-4c01-4dbe-a1d1-668ed8aba472 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.049686] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad7dbfe2-b992-4f17-b88a-41ec537cf393 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.071362] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 07cc8cd7-8368-41dd-ae13-01c8275cac9e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1465.269161] env[63379]: DEBUG oslo_vmware.api [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': task-1779191, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.284949] env[63379]: DEBUG nova.compute.manager [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Starting instance... 
{{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1465.563048] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-3e422b01-133e-48f2-94ba-62dcf2caeb2b tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Creating Snapshot of the VM instance {{(pid=63379) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1465.563413] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-edd44e70-af74-4c02-8869-a6cb55ba94cf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.572098] env[63379]: DEBUG oslo_vmware.api [None req-3e422b01-133e-48f2-94ba-62dcf2caeb2b tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Waiting for the task: (returnval){ [ 1465.572098] env[63379]: value = "task-1779192" [ 1465.572098] env[63379]: _type = "Task" [ 1465.572098] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1465.577490] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance a78feafb-00bc-44c4-acd3-a36fb8a81767 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1465.582241] env[63379]: DEBUG oslo_vmware.api [None req-3e422b01-133e-48f2-94ba-62dcf2caeb2b tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': task-1779192, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.671315] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71860613-5193-4c46-9253-45efcd1fb399 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.682845] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b6501323-7b7c-4e02-9ac9-04bb15f332f9 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Suspending the VM {{(pid=63379) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1465.683143] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-64abbaae-7a58-4ba3-b98b-f6d1c9402f29 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.693020] env[63379]: DEBUG oslo_vmware.api [None req-b6501323-7b7c-4e02-9ac9-04bb15f332f9 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1465.693020] env[63379]: value = "task-1779193" [ 1465.693020] env[63379]: _type = "Task" [ 1465.693020] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1465.699460] env[63379]: DEBUG oslo_vmware.api [None req-b6501323-7b7c-4e02-9ac9-04bb15f332f9 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779193, 'name': SuspendVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.771981] env[63379]: DEBUG oslo_vmware.api [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': task-1779191, 'name': ReconfigVM_Task, 'duration_secs': 0.514845} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1465.776030] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Reconfigured VM instance instance-0000001d to attach disk [datastore1] 915aec20-5765-4aad-8b0f-f2d71b7d6428/915aec20-5765-4aad-8b0f-f2d71b7d6428.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1465.776030] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-778fc8ab-f5d7-4db3-9228-e30cb558391c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.784478] env[63379]: DEBUG oslo_vmware.api [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Waiting for the task: (returnval){ [ 1465.784478] env[63379]: value = "task-1779194" [ 1465.784478] env[63379]: _type = "Task" [ 1465.784478] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1465.801711] env[63379]: DEBUG oslo_vmware.api [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': task-1779194, 'name': Rename_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.821637] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1466.083479] env[63379]: DEBUG oslo_vmware.api [None req-3e422b01-133e-48f2-94ba-62dcf2caeb2b tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': task-1779192, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.084950] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1466.201435] env[63379]: DEBUG oslo_vmware.api [None req-b6501323-7b7c-4e02-9ac9-04bb15f332f9 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779193, 'name': SuspendVM_Task} progress is 62%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.294808] env[63379]: DEBUG oslo_vmware.api [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': task-1779194, 'name': Rename_Task, 'duration_secs': 0.445343} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.295360] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1466.295848] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1365aea0-5ec1-4cb2-8825-c2c39701b921 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.302439] env[63379]: DEBUG oslo_vmware.api [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Waiting for the task: (returnval){ [ 1466.302439] env[63379]: value = "task-1779195" [ 1466.302439] env[63379]: _type = "Task" [ 1466.302439] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.313319] env[63379]: DEBUG oslo_vmware.api [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': task-1779195, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.588954] env[63379]: DEBUG oslo_vmware.api [None req-3e422b01-133e-48f2-94ba-62dcf2caeb2b tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': task-1779192, 'name': CreateSnapshot_Task, 'duration_secs': 0.581636} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.588954] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-3e422b01-133e-48f2-94ba-62dcf2caeb2b tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Created Snapshot of the VM instance {{(pid=63379) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1466.588954] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance ee36cc5f-61a1-4e4f-9cae-670f5868d90c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1466.589654] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48e43b4f-5397-438d-826a-f3a565498fc0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.704376] env[63379]: DEBUG oslo_vmware.api [None req-b6501323-7b7c-4e02-9ac9-04bb15f332f9 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779193, 'name': SuspendVM_Task, 'duration_secs': 0.790812} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.706024] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b6501323-7b7c-4e02-9ac9-04bb15f332f9 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Suspended the VM {{(pid=63379) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1466.706024] env[63379]: DEBUG nova.compute.manager [None req-b6501323-7b7c-4e02-9ac9-04bb15f332f9 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1466.706024] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c6c03b5-cb1c-4ed4-9a8d-17dbade76388 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.818252] env[63379]: DEBUG oslo_vmware.api [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': task-1779195, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.094754] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance aa44a4ff-14e5-42d2-a082-06fe0ae9646c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1467.109453] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-3e422b01-133e-48f2-94ba-62dcf2caeb2b tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Creating linked-clone VM from snapshot {{(pid=63379) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1467.110694] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-b6aa482b-89a5-4dba-801c-8faf3e8b010e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.121044] env[63379]: DEBUG oslo_vmware.api [None req-3e422b01-133e-48f2-94ba-62dcf2caeb2b tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Waiting for the task: (returnval){ [ 1467.121044] env[63379]: value = "task-1779196" [ 1467.121044] env[63379]: _type = "Task" [ 1467.121044] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1467.131390] env[63379]: DEBUG oslo_vmware.api [None req-3e422b01-133e-48f2-94ba-62dcf2caeb2b tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': task-1779196, 'name': CloneVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.316500] env[63379]: DEBUG oslo_vmware.api [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': task-1779195, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.599684] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance aedff32b-b0c2-4a93-a2c6-349d26839cc4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1467.634235] env[63379]: DEBUG oslo_vmware.api [None req-3e422b01-133e-48f2-94ba-62dcf2caeb2b tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': task-1779196, 'name': CloneVM_Task} progress is 94%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.818662] env[63379]: DEBUG oslo_vmware.api [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': task-1779195, 'name': PowerOnVM_Task, 'duration_secs': 1.444347} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1467.819058] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1467.819244] env[63379]: INFO nova.compute.manager [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Took 10.30 seconds to spawn the instance on the hypervisor. [ 1467.819451] env[63379]: DEBUG nova.compute.manager [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1467.820305] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a46a158-5771-4e41-88b4-1d3b768af20f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.102975] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 758ade2c-7f75-4907-95d5-681d5792ae31 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1468.134960] env[63379]: DEBUG oslo_vmware.api [None req-3e422b01-133e-48f2-94ba-62dcf2caeb2b tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': task-1779196, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.220136] env[63379]: DEBUG oslo_concurrency.lockutils [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Acquiring lock "c439fe86-fc43-4c05-a4b7-3634a043269a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1468.220136] env[63379]: DEBUG oslo_concurrency.lockutils [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Lock "c439fe86-fc43-4c05-a4b7-3634a043269a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1468.343850] env[63379]: INFO nova.compute.manager [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Took 43.76 seconds to build instance. [ 1468.605608] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 0324da80-b97c-4dc9-9083-199fbda60341 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1468.633170] env[63379]: DEBUG oslo_vmware.api [None req-3e422b01-133e-48f2-94ba-62dcf2caeb2b tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': task-1779196, 'name': CloneVM_Task} progress is 95%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.845368] env[63379]: DEBUG oslo_concurrency.lockutils [None req-75d37116-3804-4492-ac87-92893f2d4936 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Lock "915aec20-5765-4aad-8b0f-f2d71b7d6428" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.156s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1469.081996] env[63379]: DEBUG nova.compute.manager [None req-dc52c26e-6b1c-4125-abd8-66e30302a87f tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1469.083025] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d7e96e0-2859-424d-bc2a-c311f480cb14 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.108921] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 2f98800d-800f-4ad7-bd65-f12879f02ce5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1469.109300] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Total usable vcpus: 48, total allocated vcpus: 13 {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1469.109401] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3072MB phys_disk=200GB used_disk=12GB total_vcpus=48 used_vcpus=13 pci_stats=[] {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1469.134136] env[63379]: DEBUG oslo_vmware.api [None req-3e422b01-133e-48f2-94ba-62dcf2caeb2b tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': task-1779196, 'name': CloneVM_Task, 'duration_secs': 1.939666} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1469.135761] env[63379]: INFO nova.virt.vmwareapi.vmops [None req-3e422b01-133e-48f2-94ba-62dcf2caeb2b tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Created linked-clone VM from snapshot [ 1469.136454] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bd1dbb7-7bbf-438c-a780-f9649996dbfa {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.148386] env[63379]: DEBUG nova.virt.vmwareapi.images [None req-3e422b01-133e-48f2-94ba-62dcf2caeb2b tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Uploading image 1744a749-375f-44d7-aeb9-f530f04c9000 {{(pid=63379) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1469.172162] env[63379]: DEBUG oslo_vmware.rw_handles [None req-3e422b01-133e-48f2-94ba-62dcf2caeb2b tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1469.172162] env[63379]: value = "vm-369306" [ 1469.172162] env[63379]: _type = "VirtualMachine" [ 1469.172162] env[63379]: }. {{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1469.173461] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-2bf43ccd-79c9-4144-9ddd-397b96426058 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.189847] env[63379]: DEBUG oslo_vmware.rw_handles [None req-3e422b01-133e-48f2-94ba-62dcf2caeb2b tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Lease: (returnval){ [ 1469.189847] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e02780-c26f-d063-6e7d-7b7074a24b24" [ 1469.189847] env[63379]: _type = "HttpNfcLease" [ 1469.189847] env[63379]: } obtained for exporting VM: (result){ [ 1469.189847] env[63379]: value = "vm-369306" [ 1469.189847] env[63379]: _type = "VirtualMachine" [ 1469.189847] env[63379]: }. {{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1469.190457] env[63379]: DEBUG oslo_vmware.api [None req-3e422b01-133e-48f2-94ba-62dcf2caeb2b tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Waiting for the lease: (returnval){ [ 1469.190457] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e02780-c26f-d063-6e7d-7b7074a24b24" [ 1469.190457] env[63379]: _type = "HttpNfcLease" [ 1469.190457] env[63379]: } to be ready. {{(pid=63379) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1469.205937] env[63379]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1469.205937] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e02780-c26f-d063-6e7d-7b7074a24b24" [ 1469.205937] env[63379]: _type = "HttpNfcLease" [ 1469.205937] env[63379]: } is initializing. 
{{(pid=63379) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1469.349182] env[63379]: DEBUG nova.compute.manager [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1469.572447] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dea4ca57-e417-4257-925d-afac46387041 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.581137] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17070a26-8979-49b6-b966-cb09e607e1ae {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.613290] env[63379]: INFO nova.compute.manager [None req-dc52c26e-6b1c-4125-abd8-66e30302a87f tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] instance snapshotting [ 1469.613547] env[63379]: WARNING nova.compute.manager [None req-dc52c26e-6b1c-4125-abd8-66e30302a87f tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 1469.616085] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aaa5ede-123a-49d5-9cf3-4e482718dda3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.619705] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fbc33ab-b752-4a2a-8abd-0a0e74de70a8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.640758] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf61f25b-f7e1-4f56-9325-f543013a27d8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.644700] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25204e94-9025-4804-b527-de92d7dc38bf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.660423] env[63379]: DEBUG nova.compute.provider_tree [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1469.702527] env[63379]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1469.702527] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e02780-c26f-d063-6e7d-7b7074a24b24" [ 1469.702527] env[63379]: _type = "HttpNfcLease" [ 1469.702527] env[63379]: } is ready. 
{{(pid=63379) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1469.702832] env[63379]: DEBUG oslo_vmware.rw_handles [None req-3e422b01-133e-48f2-94ba-62dcf2caeb2b tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1469.702832] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e02780-c26f-d063-6e7d-7b7074a24b24" [ 1469.702832] env[63379]: _type = "HttpNfcLease" [ 1469.702832] env[63379]: }. {{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1469.703583] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-effa3260-e865-4c22-95a3-89b52ca9cb54 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.711285] env[63379]: DEBUG oslo_vmware.rw_handles [None req-3e422b01-133e-48f2-94ba-62dcf2caeb2b tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ff3184-ade2-405a-8f16-58a567d1e39e/disk-0.vmdk from lease info. {{(pid=63379) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1469.711464] env[63379]: DEBUG oslo_vmware.rw_handles [None req-3e422b01-133e-48f2-94ba-62dcf2caeb2b tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ff3184-ade2-405a-8f16-58a567d1e39e/disk-0.vmdk for reading. {{(pid=63379) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1469.799043] env[63379]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-6b87bace-6184-454a-8099-bf782797f022 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.862431] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Acquiring lock "318355e9-b4cc-4645-ac51-b583d14e1134" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1469.862797] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Lock "318355e9-b4cc-4645-ac51-b583d14e1134" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1469.869479] env[63379]: DEBUG oslo_concurrency.lockutils [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1470.167818] env[63379]: DEBUG 
nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1470.171308] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-dc52c26e-6b1c-4125-abd8-66e30302a87f tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Creating Snapshot of the VM instance {{(pid=63379) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1470.174812] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-f34f127f-e62d-453a-becb-f7d88ab33166 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.195900] env[63379]: DEBUG oslo_vmware.api [None req-dc52c26e-6b1c-4125-abd8-66e30302a87f tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1470.195900] env[63379]: value = "task-1779198" [ 1470.195900] env[63379]: _type = "Task" [ 1470.195900] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.208049] env[63379]: DEBUG oslo_vmware.api [None req-dc52c26e-6b1c-4125-abd8-66e30302a87f tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779198, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.673012] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63379) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1470.673641] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 8.664s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1470.676115] env[63379]: DEBUG oslo_concurrency.lockutils [None req-450f068e-6792-4f5d-b465-4f79110f9501 tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 42.910s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1470.676115] env[63379]: DEBUG oslo_concurrency.lockutils [None req-450f068e-6792-4f5d-b465-4f79110f9501 tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1470.677138] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b1269a0f-59e8-4dca-9fde-96f070df9bb8 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 42.725s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1470.677490] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b1269a0f-59e8-4dca-9fde-96f070df9bb8 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1470.679741] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bd9a3b51-1fd2-4a0f-9755-14f62b5be011 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 41.905s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1470.680090] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bd9a3b51-1fd2-4a0f-9755-14f62b5be011 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1470.685714] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6b468f1e-7338-4565-ab83-220e4eee1afb 
tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 40.924s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1470.685714] env[63379]: INFO nova.compute.claims [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1470.712742] env[63379]: DEBUG oslo_vmware.api [None req-dc52c26e-6b1c-4125-abd8-66e30302a87f tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779198, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.716220] env[63379]: INFO nova.scheduler.client.report [None req-bd9a3b51-1fd2-4a0f-9755-14f62b5be011 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Deleted allocations for instance 6b4e80fc-582f-432b-aa99-ec133127578e [ 1470.720545] env[63379]: INFO nova.scheduler.client.report [None req-450f068e-6792-4f5d-b465-4f79110f9501 tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Deleted allocations for instance 25090d85-cd10-44fc-aa9d-071ada14f249 [ 1470.739737] env[63379]: INFO nova.scheduler.client.report [None req-b1269a0f-59e8-4dca-9fde-96f070df9bb8 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Deleted allocations for instance efc5b3b6-bed4-484c-8a0c-65810747382d [ 1471.207639] env[63379]: DEBUG oslo_vmware.api [None req-dc52c26e-6b1c-4125-abd8-66e30302a87f tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779198, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.232185] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bd9a3b51-1fd2-4a0f-9755-14f62b5be011 tempest-ServerExternalEventsTest-416951114 tempest-ServerExternalEventsTest-416951114-project-member] Lock "6b4e80fc-582f-432b-aa99-ec133127578e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.925s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1471.233664] env[63379]: DEBUG oslo_concurrency.lockutils [None req-450f068e-6792-4f5d-b465-4f79110f9501 tempest-FloatingIPsAssociationNegativeTestJSON-35250021 tempest-FloatingIPsAssociationNegativeTestJSON-35250021-project-member] Lock "25090d85-cd10-44fc-aa9d-071ada14f249" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.562s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1471.250578] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b1269a0f-59e8-4dca-9fde-96f070df9bb8 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Lock "efc5b3b6-bed4-484c-8a0c-65810747382d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.615s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1471.710766] env[63379]: DEBUG oslo_vmware.api [None req-dc52c26e-6b1c-4125-abd8-66e30302a87f tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779198, 'name': CreateSnapshot_Task, 'duration_secs': 1.22403} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1471.711178] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-dc52c26e-6b1c-4125-abd8-66e30302a87f tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Created Snapshot of the VM instance {{(pid=63379) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1471.711884] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f67791c-be0c-4253-9025-f3a0c829f513 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.184609] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33488ced-4f41-4790-8288-5bfd137258a7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.192644] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9557e885-ae39-44d0-bd49-23ca015ddb53 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.223689] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82b63be4-863d-46fb-8679-cbfc3f2034e5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.233142] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-dc52c26e-6b1c-4125-abd8-66e30302a87f tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Creating linked-clone VM from snapshot {{(pid=63379) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1472.233229] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-8cc1a9a4-4a66-482e-9105-f4cf1d9aa5ec {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.243888] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-289c46e6-8432-4d8a-bf8c-7d9db9150cc0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.247821] env[63379]: DEBUG oslo_vmware.api [None req-dc52c26e-6b1c-4125-abd8-66e30302a87f tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1472.247821] env[63379]: value = "task-1779199" [ 1472.247821] env[63379]: _type = "Task" [ 1472.247821] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1472.259386] env[63379]: DEBUG nova.compute.provider_tree [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1472.263505] env[63379]: DEBUG oslo_vmware.api [None req-dc52c26e-6b1c-4125-abd8-66e30302a87f tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779199, 'name': CloneVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.689206] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1472.689639] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1472.765813] env[63379]: DEBUG nova.scheduler.client.report [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1472.769811] env[63379]: DEBUG oslo_vmware.api [None req-dc52c26e-6b1c-4125-abd8-66e30302a87f tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779199, 'name': CloneVM_Task} progress is 94%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.203024] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1473.203024] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Starting heal instance info cache {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9974}} [ 1473.203024] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Rebuilding the list of instances to heal {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9978}} [ 1473.260683] env[63379]: DEBUG oslo_vmware.api [None req-dc52c26e-6b1c-4125-abd8-66e30302a87f tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779199, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.272565] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.590s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1473.273108] env[63379]: DEBUG nova.compute.manager [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1473.275933] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4615207f-e270-4c63-886e-775f7ae38a96 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 43.296s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1473.276148] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4615207f-e270-4c63-886e-775f7ae38a96 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1473.278012] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 42.293s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1473.280199] env[63379]: INFO nova.compute.claims [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1473.324634] env[63379]: INFO nova.scheduler.client.report [None req-4615207f-e270-4c63-886e-775f7ae38a96 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Deleted allocations for instance 0edadcca-042e-440b-985b-6338e20265fa [ 1473.707246] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Skipping network cache update for instance because it is Building. 
{{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9987}} [ 1473.730085] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "refresh_cache-0edadcca-042e-440b-985b-6338e20265fa" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1473.730290] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquired lock "refresh_cache-0edadcca-042e-440b-985b-6338e20265fa" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1473.730478] env[63379]: DEBUG nova.network.neutron [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Forcefully refreshing network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1473.730675] env[63379]: DEBUG nova.objects.instance [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lazy-loading 'info_cache' on Instance uuid 0edadcca-042e-440b-985b-6338e20265fa {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1473.760516] env[63379]: DEBUG oslo_vmware.api [None req-dc52c26e-6b1c-4125-abd8-66e30302a87f tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779199, 'name': CloneVM_Task} progress is 95%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.790584] env[63379]: DEBUG nova.compute.utils [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1473.794837] env[63379]: DEBUG nova.compute.manager [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1473.795114] env[63379]: DEBUG nova.network.neutron [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1473.834032] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4615207f-e270-4c63-886e-775f7ae38a96 tempest-ListServersNegativeTestJSON-1342451698 tempest-ListServersNegativeTestJSON-1342451698-project-member] Lock "0edadcca-042e-440b-985b-6338e20265fa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 48.135s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1473.863706] env[63379]: DEBUG nova.policy [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'deef4f9ae0754a6c8a7f673c10a76408', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8c01c5c8c3734c4ea066324e542e7374', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1474.225839] env[63379]: DEBUG nova.network.neutron [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Successfully created port: ef820562-0de4-462d-a51d-13e4a4929719 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1474.261620] env[63379]: DEBUG oslo_vmware.api [None req-dc52c26e-6b1c-4125-abd8-66e30302a87f tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779199, 'name': CloneVM_Task, 'duration_secs': 1.805019} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.261941] env[63379]: INFO nova.virt.vmwareapi.vmops [None req-dc52c26e-6b1c-4125-abd8-66e30302a87f tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Created linked-clone VM from snapshot [ 1474.262812] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de391493-2ebb-404c-82a4-d0dcb973ca69 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.272040] env[63379]: DEBUG nova.virt.vmwareapi.images [None req-dc52c26e-6b1c-4125-abd8-66e30302a87f tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Uploading image 8f042f3b-ea84-4947-9d5d-d9ea8c484f3e {{(pid=63379) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1474.299350] env[63379]: DEBUG oslo_vmware.rw_handles [None req-dc52c26e-6b1c-4125-abd8-66e30302a87f tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1474.299350] env[63379]: value = "vm-369308" [ 1474.299350] env[63379]: _type = "VirtualMachine" [ 1474.299350] env[63379]: }. {{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1474.305278] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-8870ceaa-afd8-4b48-b7a9-bd1981f0cf96 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.305278] env[63379]: DEBUG nova.compute.manager [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1474.315075] env[63379]: DEBUG oslo_vmware.rw_handles [None req-dc52c26e-6b1c-4125-abd8-66e30302a87f tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lease: (returnval){ [ 1474.315075] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]525522b5-4dda-e320-047f-3e426db77d0f" [ 1474.315075] env[63379]: _type = "HttpNfcLease" [ 1474.315075] env[63379]: } obtained for exporting VM: (result){ [ 1474.315075] env[63379]: value = "vm-369308" [ 1474.315075] env[63379]: _type = "VirtualMachine" [ 1474.315075] env[63379]: }. {{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1474.315477] env[63379]: DEBUG oslo_vmware.api [None req-dc52c26e-6b1c-4125-abd8-66e30302a87f tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the lease: (returnval){ [ 1474.315477] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]525522b5-4dda-e320-047f-3e426db77d0f" [ 1474.315477] env[63379]: _type = "HttpNfcLease" [ 1474.315477] env[63379]: } to be ready. 
{{(pid=63379) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1474.323402] env[63379]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1474.323402] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]525522b5-4dda-e320-047f-3e426db77d0f" [ 1474.323402] env[63379]: _type = "HttpNfcLease" [ 1474.323402] env[63379]: } is initializing. {{(pid=63379) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1474.742265] env[63379]: DEBUG nova.compute.utils [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Can not refresh info_cache because instance was not found {{(pid=63379) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1024}} [ 1474.779637] env[63379]: DEBUG nova.network.neutron [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1474.831553] env[63379]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1474.831553] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]525522b5-4dda-e320-047f-3e426db77d0f" [ 1474.831553] env[63379]: _type = "HttpNfcLease" [ 1474.831553] env[63379]: } is ready. {{(pid=63379) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1474.831874] env[63379]: DEBUG oslo_vmware.rw_handles [None req-dc52c26e-6b1c-4125-abd8-66e30302a87f tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1474.831874] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]525522b5-4dda-e320-047f-3e426db77d0f" [ 1474.831874] env[63379]: _type = "HttpNfcLease" [ 1474.831874] env[63379]: }. {{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1474.836834] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7ffe51f-96a6-4536-ae73-34739d15741f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.847583] env[63379]: DEBUG oslo_vmware.rw_handles [None req-dc52c26e-6b1c-4125-abd8-66e30302a87f tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a19f1e-81f4-c8fd-606a-f45d45c04bf1/disk-0.vmdk from lease info. {{(pid=63379) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1474.847773] env[63379]: DEBUG oslo_vmware.rw_handles [None req-dc52c26e-6b1c-4125-abd8-66e30302a87f tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a19f1e-81f4-c8fd-606a-f45d45c04bf1/disk-0.vmdk for reading. 
{{(pid=63379) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1474.913200] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1c906de-d00c-4e85-bf66-2051513bbf0c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.923774] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8710a2a3-3379-47f4-af2d-30061f44c962 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.964157] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5bf6e4c-1b3f-4a05-b06c-27f690a5651e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.969233] env[63379]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-13748b3b-c345-41fd-ad9d-8d302194045a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.977106] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58efa0fa-976f-4a08-ace7-1f79ccc75e58 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.993724] env[63379]: DEBUG nova.compute.provider_tree [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1475.317578] env[63379]: DEBUG nova.compute.manager [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1475.349967] env[63379]: DEBUG nova.virt.hardware [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1475.350284] env[63379]: DEBUG nova.virt.hardware [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1475.350464] env[63379]: DEBUG nova.virt.hardware [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1475.350678] env[63379]: DEBUG nova.virt.hardware [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1475.350858] env[63379]: DEBUG nova.virt.hardware [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1475.352528] env[63379]: DEBUG nova.virt.hardware [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1475.352528] env[63379]: DEBUG nova.virt.hardware [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1475.352805] env[63379]: DEBUG nova.virt.hardware [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1475.353073] env[63379]: DEBUG nova.virt.hardware [None 
req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1475.353847] env[63379]: DEBUG nova.virt.hardware [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1475.353847] env[63379]: DEBUG nova.virt.hardware [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1475.354518] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15319e96-5959-4157-b3a2-f2eeaf0c4e6d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.365129] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1995b3fe-85b5-45c1-bcbf-e03a8316873e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.461369] env[63379]: DEBUG nova.network.neutron [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1475.499151] env[63379]: DEBUG nova.scheduler.client.report [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1475.964994] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Releasing lock "refresh_cache-0edadcca-042e-440b-985b-6338e20265fa" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1475.965226] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Updated the network info_cache for instance {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10045}} [ 1475.965631] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1475.965868] env[63379]: DEBUG oslo_service.periodic_task [None 
req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1475.966171] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1475.966382] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1475.969418] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1475.969710] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1475.969817] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63379) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10593}} [ 1475.969972] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager.update_available_resource {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1476.010455] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.732s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1476.010705] env[63379]: DEBUG nova.compute.manager [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1476.014720] env[63379]: DEBUG oslo_concurrency.lockutils [None req-93b79982-385d-4ae8-bd60-0c3d501e4c59 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 42.812s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1476.014977] env[63379]: DEBUG oslo_concurrency.lockutils [None req-93b79982-385d-4ae8-bd60-0c3d501e4c59 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1476.018959] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b6d271ca-ef28-45e1-850d-73097d3eaf57 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 41.819s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1476.018959] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b6d271ca-ef28-45e1-850d-73097d3eaf57 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1476.019824] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 41.644s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1476.024830] env[63379]: INFO nova.compute.claims [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1476.069110] env[63379]: INFO nova.scheduler.client.report [None req-b6d271ca-ef28-45e1-850d-73097d3eaf57 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Deleted allocations for instance c999d64e-3f5b-4854-8b92-6d0d17f49dd7 [ 1476.075222] env[63379]: INFO nova.scheduler.client.report [None req-93b79982-385d-4ae8-bd60-0c3d501e4c59 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Deleted allocations for instance de671ba9-0d86-4f89-a6bd-ecea9ad0ba85 [ 1476.183198] env[63379]: DEBUG nova.network.neutron [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Successfully updated port: ef820562-0de4-462d-a51d-13e4a4929719 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 
1476.475382] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1476.536374] env[63379]: DEBUG nova.compute.utils [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1476.542438] env[63379]: DEBUG nova.compute.manager [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1476.542672] env[63379]: DEBUG nova.network.neutron [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1476.584225] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b6d271ca-ef28-45e1-850d-73097d3eaf57 tempest-ServerAddressesNegativeTestJSON-1486370473 tempest-ServerAddressesNegativeTestJSON-1486370473-project-member] Lock "c999d64e-3f5b-4854-8b92-6d0d17f49dd7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.075s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1476.585799] env[63379]: DEBUG oslo_concurrency.lockutils [None req-93b79982-385d-4ae8-bd60-0c3d501e4c59 tempest-InstanceActionsV221TestJSON-186298048 tempest-InstanceActionsV221TestJSON-186298048-project-member] Lock "de671ba9-0d86-4f89-a6bd-ecea9ad0ba85" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.772s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1476.628444] env[63379]: DEBUG nova.policy [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b70275b98f8b4569a93d289fbd25901a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '696eed8e898e4ffd831805df17a93d27', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1476.685824] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "refresh_cache-90f0c97d-695b-4975-8ab9-4e77a9175da1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1476.686675] env[63379]: DEBUG oslo_concurrency.lockutils [None 
req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquired lock "refresh_cache-90f0c97d-695b-4975-8ab9-4e77a9175da1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1476.686909] env[63379]: DEBUG nova.network.neutron [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1477.046275] env[63379]: DEBUG nova.compute.manager [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1477.236277] env[63379]: DEBUG nova.network.neutron [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Successfully created port: 1c6c710f-163e-4747-8489-53e8fdf2cf1f {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1477.284031] env[63379]: DEBUG nova.network.neutron [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1477.377113] env[63379]: DEBUG oslo_vmware.rw_handles [None req-3e422b01-133e-48f2-94ba-62dcf2caeb2b tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ff3184-ade2-405a-8f16-58a567d1e39e/disk-0.vmdk. {{(pid=63379) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1477.378243] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e8adf5d-e8d2-446e-afdb-c7db1d107370 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.389322] env[63379]: DEBUG oslo_vmware.rw_handles [None req-3e422b01-133e-48f2-94ba-62dcf2caeb2b tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ff3184-ade2-405a-8f16-58a567d1e39e/disk-0.vmdk is in state: ready. {{(pid=63379) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1477.389431] env[63379]: ERROR oslo_vmware.rw_handles [None req-3e422b01-133e-48f2-94ba-62dcf2caeb2b tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ff3184-ade2-405a-8f16-58a567d1e39e/disk-0.vmdk due to incomplete transfer. 
[ 1477.390027] env[63379]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-492a27ca-330e-460a-9d58-bc671d2cfa5b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.409076] env[63379]: DEBUG oslo_vmware.rw_handles [None req-3e422b01-133e-48f2-94ba-62dcf2caeb2b tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ff3184-ade2-405a-8f16-58a567d1e39e/disk-0.vmdk. {{(pid=63379) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1477.409076] env[63379]: DEBUG nova.virt.vmwareapi.images [None req-3e422b01-133e-48f2-94ba-62dcf2caeb2b tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Uploaded image 1744a749-375f-44d7-aeb9-f530f04c9000 to the Glance image server {{(pid=63379) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1477.410560] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e422b01-133e-48f2-94ba-62dcf2caeb2b tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Destroying the VM {{(pid=63379) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1477.411895] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-612d120b-5e15-4cca-8a14-867517fb44ff {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.421794] env[63379]: DEBUG oslo_vmware.api [None req-3e422b01-133e-48f2-94ba-62dcf2caeb2b tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Waiting for the task: (returnval){ [ 1477.421794] env[63379]: value = "task-1779201" [ 1477.421794] env[63379]: _type = "Task" [ 1477.421794] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.436736] env[63379]: DEBUG oslo_vmware.api [None req-3e422b01-133e-48f2-94ba-62dcf2caeb2b tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': task-1779201, 'name': Destroy_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.503080] env[63379]: DEBUG nova.compute.manager [req-b7e0f973-d01a-4041-9148-3b70c74348cc req-6eb4c7ac-10ce-48c1-ae4e-6612620f25c5 service nova] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Received event network-vif-plugged-ef820562-0de4-462d-a51d-13e4a4929719 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1477.503080] env[63379]: DEBUG oslo_concurrency.lockutils [req-b7e0f973-d01a-4041-9148-3b70c74348cc req-6eb4c7ac-10ce-48c1-ae4e-6612620f25c5 service nova] Acquiring lock "90f0c97d-695b-4975-8ab9-4e77a9175da1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1477.503080] env[63379]: DEBUG oslo_concurrency.lockutils [req-b7e0f973-d01a-4041-9148-3b70c74348cc req-6eb4c7ac-10ce-48c1-ae4e-6612620f25c5 service nova] Lock "90f0c97d-695b-4975-8ab9-4e77a9175da1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1477.503080] env[63379]: DEBUG oslo_concurrency.lockutils [req-b7e0f973-d01a-4041-9148-3b70c74348cc req-6eb4c7ac-10ce-48c1-ae4e-6612620f25c5 service nova] Lock "90f0c97d-695b-4975-8ab9-4e77a9175da1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1477.503080] env[63379]: DEBUG nova.compute.manager [req-b7e0f973-d01a-4041-9148-3b70c74348cc req-6eb4c7ac-10ce-48c1-ae4e-6612620f25c5 service nova] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] No waiting events found dispatching network-vif-plugged-ef820562-0de4-462d-a51d-13e4a4929719 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1477.503080] env[63379]: WARNING nova.compute.manager [req-b7e0f973-d01a-4041-9148-3b70c74348cc req-6eb4c7ac-10ce-48c1-ae4e-6612620f25c5 service nova] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Received unexpected event network-vif-plugged-ef820562-0de4-462d-a51d-13e4a4929719 for instance with vm_state building and task_state spawning. 
[ 1477.629472] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6d40d51-d330-4f8c-8256-d8d99df70e55 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.641322] env[63379]: DEBUG nova.network.neutron [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Updating instance_info_cache with network_info: [{"id": "ef820562-0de4-462d-a51d-13e4a4929719", "address": "fa:16:3e:eb:5b:7f", "network": {"id": "c67e6fb1-ba3e-4494-b459-ecd555f3bf64", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1864563188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c01c5c8c3734c4ea066324e542e7374", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6934071-bf85-4591-9c7d-55c7ea131262", "external-id": "nsx-vlan-transportzone-452", "segmentation_id": 452, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef820562-0d", "ovs_interfaceid": "ef820562-0de4-462d-a51d-13e4a4929719", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1477.643305] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-405b2cb7-4259-43a8-b52e-5833c47f5d9a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.689936] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35141013-d881-4b3d-9b6d-63ec35ae1c3f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.700655] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ed79869-8a66-482a-b08b-73683c0348ab {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.719772] env[63379]: DEBUG nova.compute.provider_tree [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1477.933841] env[63379]: DEBUG oslo_vmware.api [None req-3e422b01-133e-48f2-94ba-62dcf2caeb2b tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': task-1779201, 'name': Destroy_Task, 'duration_secs': 0.432914} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.938270] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-3e422b01-133e-48f2-94ba-62dcf2caeb2b tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Destroyed the VM [ 1477.938599] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-3e422b01-133e-48f2-94ba-62dcf2caeb2b tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Deleting Snapshot of the VM instance {{(pid=63379) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1477.938904] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-ba54948a-063a-4e6e-a801-dd4bd10de31a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.947627] env[63379]: DEBUG oslo_vmware.api [None req-3e422b01-133e-48f2-94ba-62dcf2caeb2b tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Waiting for the task: (returnval){ [ 1477.947627] env[63379]: value = "task-1779202" [ 1477.947627] env[63379]: _type = "Task" [ 1477.947627] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.964384] env[63379]: DEBUG oslo_vmware.api [None req-3e422b01-133e-48f2-94ba-62dcf2caeb2b tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': task-1779202, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.058278] env[63379]: DEBUG nova.compute.manager [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1478.101982] env[63379]: DEBUG nova.virt.hardware [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1478.101982] env[63379]: DEBUG nova.virt.hardware [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1478.101982] env[63379]: DEBUG nova.virt.hardware [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1478.101982] env[63379]: DEBUG nova.virt.hardware [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1478.101982] env[63379]: DEBUG nova.virt.hardware [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1478.101982] env[63379]: DEBUG nova.virt.hardware [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1478.101982] env[63379]: DEBUG nova.virt.hardware [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1478.101982] env[63379]: DEBUG nova.virt.hardware [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1478.102406] env[63379]: DEBUG nova.virt.hardware [None 
req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1478.102406] env[63379]: DEBUG nova.virt.hardware [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1478.102464] env[63379]: DEBUG nova.virt.hardware [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1478.103593] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af0a1dd0-1acd-483b-9856-f1967c7bef9e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.114473] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7269ae5d-33a5-45f5-8380-9741676b3bf8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.152512] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Releasing lock "refresh_cache-90f0c97d-695b-4975-8ab9-4e77a9175da1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1478.152856] env[63379]: DEBUG nova.compute.manager [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Instance network_info: |[{"id": "ef820562-0de4-462d-a51d-13e4a4929719", "address": "fa:16:3e:eb:5b:7f", "network": {"id": "c67e6fb1-ba3e-4494-b459-ecd555f3bf64", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1864563188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c01c5c8c3734c4ea066324e542e7374", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6934071-bf85-4591-9c7d-55c7ea131262", "external-id": "nsx-vlan-transportzone-452", "segmentation_id": 452, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef820562-0d", "ovs_interfaceid": "ef820562-0de4-462d-a51d-13e4a4929719", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1478.153321] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6b468f1e-7338-4565-ab83-220e4eee1afb 
tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:eb:5b:7f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c6934071-bf85-4591-9c7d-55c7ea131262', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ef820562-0de4-462d-a51d-13e4a4929719', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1478.162050] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Creating folder: Project (8c01c5c8c3734c4ea066324e542e7374). Parent ref: group-v369214. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1478.162879] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e881ac39-3984-4e28-af05-af22926afb08 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.193634] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Created folder: Project (8c01c5c8c3734c4ea066324e542e7374) in parent group-v369214. [ 1478.194053] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Creating folder: Instances. Parent ref: group-v369309. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1478.194366] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-020f3361-1ac1-4bc8-b012-e897fa94f29e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.211538] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Created folder: Instances in parent group-v369309. [ 1478.211841] env[63379]: DEBUG oslo.service.loopingcall [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1478.212073] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1478.212316] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d2726820-3155-4193-bcb5-fc813ea56f9c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.233045] env[63379]: DEBUG nova.scheduler.client.report [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1478.242825] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1478.242825] env[63379]: value = "task-1779205" [ 1478.242825] env[63379]: _type = "Task" [ 1478.242825] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.253120] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779205, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.462989] env[63379]: DEBUG oslo_vmware.api [None req-3e422b01-133e-48f2-94ba-62dcf2caeb2b tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': task-1779202, 'name': RemoveSnapshot_Task} progress is 36%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.739559] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.720s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1478.740789] env[63379]: DEBUG oslo_concurrency.lockutils [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 40.433s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1478.743391] env[63379]: INFO nova.compute.claims [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1478.759876] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779205, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.965788] env[63379]: DEBUG oslo_vmware.api [None req-3e422b01-133e-48f2-94ba-62dcf2caeb2b tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': task-1779202, 'name': RemoveSnapshot_Task, 'duration_secs': 0.596688} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.966594] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-3e422b01-133e-48f2-94ba-62dcf2caeb2b tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Deleted Snapshot of the VM instance {{(pid=63379) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1478.966998] env[63379]: INFO nova.compute.manager [None req-3e422b01-133e-48f2-94ba-62dcf2caeb2b tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Took 13.95 seconds to snapshot the instance on the hypervisor. [ 1479.247590] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Acquiring lock "e03240b2-57eb-435e-988e-4ba896145262" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1479.247919] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Lock "e03240b2-57eb-435e-988e-4ba896145262" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1479.275687] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779205, 'name': CreateVM_Task, 'duration_secs': 0.597863} completed successfully. 
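The compute_resources lock messages just above ('acquired ... waited 40.433s', 'released ... held 2.720s') are produced by a lock wrapper that measures how long a caller waited for a named lock and how long it held it. Below is a simplified stand-in for that instrumentation, assuming plain threading locks; timed_lock is an invented name, the real wrapper lives in oslo_concurrency.lockutils.

import contextlib
import threading
import time

_locks = {}
_registry_guard = threading.Lock()

@contextlib.contextmanager
def timed_lock(name, caller):
    """Acquire a named lock and report wait/hold durations, mirroring the
    'acquired by ... waited Ns' / '"released" by ... held Ns' lines."""
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    start = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - start
    print('Lock "%s" acquired by "%s" :: waited %.3fs' % (name, caller, waited))
    held_from = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - held_from
        print('Lock "%s" "released" by "%s" :: held %.3fs' % (name, caller, held))

with timed_lock('compute_resources', 'ResourceTracker.instance_claim'):
    time.sleep(0.01)  # stand-in for the claim work done under the lock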
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.275810] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1479.276541] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1479.276740] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1479.277136] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1479.277444] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06d1f007-84c2-4b5f-b4d1-8adcd0a50bbf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.284973] env[63379]: DEBUG oslo_vmware.api [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1479.284973] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]522c89a6-31a6-2711-7186-78624d95db6f" [ 1479.284973] env[63379]: _type = "Task" [ 1479.284973] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.295347] env[63379]: DEBUG oslo_vmware.api [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]522c89a6-31a6-2711-7186-78624d95db6f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.769659] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Lock "e03240b2-57eb-435e-988e-4ba896145262" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.518s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1479.769659] env[63379]: DEBUG nova.compute.manager [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1479.807262] env[63379]: DEBUG oslo_vmware.api [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]522c89a6-31a6-2711-7186-78624d95db6f, 'name': SearchDatastore_Task, 'duration_secs': 0.019489} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.807262] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1479.807262] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1479.807262] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1479.807262] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1479.807262] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1479.807262] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cb0b3810-e52d-44b6-b44c-00ffdcd356cb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.823177] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1479.823177] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Folder [datastore1] devstack-image-cache_base created. 
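The devstack-image-cache_base locking and SearchDatastore_Task calls above implement a fetch-if-missing image cache: lock the cache entry, look for the cached VMDK on the datastore, and only create the cache directory and download the image when the lookup misses. A condensed local-filesystem sketch of that decision follows; ensure_cached_image and download_image are illustrative placeholders, not the driver's API.

from pathlib import Path

def ensure_cached_image(cache_root: Path, image_id: str, download_image):
    """Return the path of the cached disk, fetching it only on a cache miss.

    download_image(target_path) is a caller-supplied callable standing in
    for the Glance download the real driver performs.
    """
    cache_dir = cache_root / image_id
    cached_vmdk = cache_dir / f'{image_id}.vmdk'
    if cached_vmdk.exists():                      # SearchDatastore_Task hit
        return cached_vmdk
    cache_dir.mkdir(parents=True, exist_ok=True)  # MakeDirectory in the log
    download_image(cached_vmdk)                   # only when missing
    return cached_vmdk

# Example with a throwaway cache root and a fake downloader.
import tempfile
root = Path(tempfile.mkdtemp())
path = ensure_cached_image(root, 'd3d2d67c', lambda p: p.write_bytes(b'vmdk'))
print(path)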
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1479.824038] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7eb071ae-e3e6-479a-a50b-ec71e804a5a2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.839021] env[63379]: DEBUG oslo_vmware.api [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1479.839021] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]527afc91-aab3-2995-7c55-e393ed52255f" [ 1479.839021] env[63379]: _type = "Task" [ 1479.839021] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.860581] env[63379]: DEBUG oslo_vmware.api [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]527afc91-aab3-2995-7c55-e393ed52255f, 'name': SearchDatastore_Task, 'duration_secs': 0.012563} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.861514] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b28c1789-867c-4362-912f-ff7edfb7038d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.865898] env[63379]: DEBUG nova.network.neutron [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Successfully updated port: 1c6c710f-163e-4747-8489-53e8fdf2cf1f {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1479.872449] env[63379]: DEBUG oslo_vmware.api [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1479.872449] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52855717-17aa-3da1-4abd-32a5adff4871" [ 1479.872449] env[63379]: _type = "Task" [ 1479.872449] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.884821] env[63379]: DEBUG oslo_vmware.api [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52855717-17aa-3da1-4abd-32a5adff4871, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.043485] env[63379]: DEBUG nova.compute.manager [req-f1a175c6-3ceb-472b-b97c-1ea023c3a931 req-189c40d3-7462-44bb-ae04-90c0c4158301 service nova] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Received event network-changed-ef820562-0de4-462d-a51d-13e4a4929719 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1480.043749] env[63379]: DEBUG nova.compute.manager [req-f1a175c6-3ceb-472b-b97c-1ea023c3a931 req-189c40d3-7462-44bb-ae04-90c0c4158301 service nova] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Refreshing instance network info cache due to event network-changed-ef820562-0de4-462d-a51d-13e4a4929719. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1480.043945] env[63379]: DEBUG oslo_concurrency.lockutils [req-f1a175c6-3ceb-472b-b97c-1ea023c3a931 req-189c40d3-7462-44bb-ae04-90c0c4158301 service nova] Acquiring lock "refresh_cache-90f0c97d-695b-4975-8ab9-4e77a9175da1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1480.044237] env[63379]: DEBUG oslo_concurrency.lockutils [req-f1a175c6-3ceb-472b-b97c-1ea023c3a931 req-189c40d3-7462-44bb-ae04-90c0c4158301 service nova] Acquired lock "refresh_cache-90f0c97d-695b-4975-8ab9-4e77a9175da1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1480.044429] env[63379]: DEBUG nova.network.neutron [req-f1a175c6-3ceb-472b-b97c-1ea023c3a931 req-189c40d3-7462-44bb-ae04-90c0c4158301 service nova] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Refreshing network info cache for port ef820562-0de4-462d-a51d-13e4a4929719 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1480.278130] env[63379]: DEBUG nova.compute.utils [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1480.279913] env[63379]: DEBUG nova.compute.manager [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Allocating IP information in the background. 
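The network-changed event handling above follows a fixed pattern: take the instance's refresh_cache lock, re-query the backend for the changed port, and rewrite the cached entry. A minimal sketch of that pattern is below; InfoCache and query_port are invented for illustration and are not Nova types.

import threading

class InfoCache:
    """Tiny per-instance network-info cache keyed by instance UUID."""

    def __init__(self):
        self._data = {}
        self._locks = {}
        self._guard = threading.Lock()

    def _lock_for(self, instance_id):
        with self._guard:
            return self._locks.setdefault(instance_id, threading.Lock())

    def refresh(self, instance_id, port_id, query_port):
        """Handle a network-changed-<port_id> event for one instance."""
        with self._lock_for(instance_id):         # "refresh_cache-<uuid>" lock
            entry = self._data.setdefault(instance_id, {})
            entry[port_id] = query_port(port_id)  # re-query the backend
            return entry

cache = InfoCache()
fresh = cache.refresh('90f0c97d', 'ef820562',
                      lambda pid: {'id': pid, 'address': 'fa:16:3e:eb:5b:7f'})
print(fresh)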
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1480.279913] env[63379]: DEBUG nova.network.neutron [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1480.290556] env[63379]: DEBUG oslo_concurrency.lockutils [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Acquiring lock "650d4709-3cbc-4b9a-b165-66fa0af97c4d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1480.291261] env[63379]: DEBUG oslo_concurrency.lockutils [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Lock "650d4709-3cbc-4b9a-b165-66fa0af97c4d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1480.339590] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b2b69de-6410-4816-8421-4358f8895822 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.348196] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0141bb09-827b-4fe0-b9f8-a8252ce669a2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.387117] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Acquiring lock "refresh_cache-04234ba7-24a3-48e5-9f62-6f4dddd0054a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1480.387315] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Acquired lock "refresh_cache-04234ba7-24a3-48e5-9f62-6f4dddd0054a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1480.387497] env[63379]: DEBUG nova.network.neutron [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1480.396088] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2084ab0b-435b-4a81-9b23-359afce37538 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.411586] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f1de67d-0231-49e7-be86-cb9cfe0c7e8e {{(pid=63379) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.416122] env[63379]: DEBUG oslo_vmware.api [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52855717-17aa-3da1-4abd-32a5adff4871, 'name': SearchDatastore_Task, 'duration_secs': 0.012248} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.417484] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1480.417945] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 90f0c97d-695b-4975-8ab9-4e77a9175da1/90f0c97d-695b-4975-8ab9-4e77a9175da1.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1480.419403] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fc6259b5-a1a9-4899-a79d-2e01b999c841 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.432494] env[63379]: DEBUG nova.compute.provider_tree [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1480.435696] env[63379]: DEBUG oslo_vmware.api [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1480.435696] env[63379]: value = "task-1779206" [ 1480.435696] env[63379]: _type = "Task" [ 1480.435696] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.446554] env[63379]: DEBUG oslo_vmware.api [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1779206, 'name': CopyVirtualDisk_Task} progress is 0%. 
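After the cache hit, the cached VMDK is copied into a per-instance datastore directory named after the instance UUID, as the CopyVirtualDisk_Task line above shows. The helper below only reproduces how those two datastore paths are laid out in the log; the actual copy is a vCenter task and is not modelled here.

def build_copy_spec(datastore: str, image_id: str, instance_id: str) -> tuple[str, str]:
    """Return (source, destination) datastore paths for the root-disk copy,
    matching the '[datastore1] cache/<image>.vmdk -> [datastore1] <uuid>/<uuid>.vmdk'
    layout seen in the log."""
    source = f'[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk'
    dest = f'[{datastore}] {instance_id}/{instance_id}.vmdk'
    return source, dest

src, dst = build_copy_spec('datastore1',
                           'd3d2d67c-c3e3-4e1e-9156-0c896c5b3d48',
                           '90f0c97d-695b-4975-8ab9-4e77a9175da1')
print(src)
print(dst)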
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.449047] env[63379]: DEBUG nova.policy [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cdfa6bad8d344fd6adf51ce1f4500693', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ee7362bde75a4a5aa7a24a09402ad153', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1480.787024] env[63379]: DEBUG nova.compute.manager [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1480.941529] env[63379]: DEBUG nova.scheduler.client.report [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1480.962755] env[63379]: DEBUG oslo_vmware.api [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1779206, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.055428] env[63379]: DEBUG nova.network.neutron [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1481.096861] env[63379]: DEBUG nova.network.neutron [req-f1a175c6-3ceb-472b-b97c-1ea023c3a931 req-189c40d3-7462-44bb-ae04-90c0c4158301 service nova] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Updated VIF entry in instance network info cache for port ef820562-0de4-462d-a51d-13e4a4929719. 
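The policy failure above is the expected outcome for an unprivileged tempest credential: attaching an external network requires a role that the caller's 'member'/'reader' roles do not grant, so the build continues on the tenant network. The toy role-based check below reproduces that outcome; the rule table is invented for illustration, real rules come from oslo.policy configuration.

# Hypothetical rule table: rule name -> roles allowed to use it.
RULES = {
    'network:attach_external_network': {'admin'},
    'network:attach_internal_network': {'admin', 'member'},
}

def authorize(rule: str, credentials: dict) -> bool:
    """Return True when any of the caller's roles satisfies the rule."""
    allowed = RULES.get(rule, set())
    return bool(allowed.intersection(credentials.get('roles', [])))

creds = {'user_id': 'cdfa6bad8d344fd6adf51ce1f4500693',
         'roles': ['member', 'reader'], 'is_admin': False}
for rule in RULES:
    outcome = 'passed' if authorize(rule, creds) else 'failed'
    print(f'Policy check for {rule} {outcome}')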
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1481.097591] env[63379]: DEBUG nova.network.neutron [req-f1a175c6-3ceb-472b-b97c-1ea023c3a931 req-189c40d3-7462-44bb-ae04-90c0c4158301 service nova] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Updating instance_info_cache with network_info: [{"id": "ef820562-0de4-462d-a51d-13e4a4929719", "address": "fa:16:3e:eb:5b:7f", "network": {"id": "c67e6fb1-ba3e-4494-b459-ecd555f3bf64", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1864563188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c01c5c8c3734c4ea066324e542e7374", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6934071-bf85-4591-9c7d-55c7ea131262", "external-id": "nsx-vlan-transportzone-452", "segmentation_id": 452, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef820562-0d", "ovs_interfaceid": "ef820562-0de4-462d-a51d-13e4a4929719", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1481.388829] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c65b6d5e-cf04-4285-af70-fc58a5cdd4a5 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Acquiring lock "76731b1b-af66-441b-8fe4-d5d7e7faf3ca" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1481.388829] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c65b6d5e-cf04-4285-af70-fc58a5cdd4a5 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Lock "76731b1b-af66-441b-8fe4-d5d7e7faf3ca" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1481.388829] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c65b6d5e-cf04-4285-af70-fc58a5cdd4a5 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Acquiring lock "76731b1b-af66-441b-8fe4-d5d7e7faf3ca-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1481.388829] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c65b6d5e-cf04-4285-af70-fc58a5cdd4a5 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Lock "76731b1b-af66-441b-8fe4-d5d7e7faf3ca-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1481.388829] env[63379]: DEBUG oslo_concurrency.lockutils [None 
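The instance_info_cache entry above is nested JSON: a list of VIFs, each carrying a network with subnets and per-subnet IPs. The short walker below pulls the MAC and fixed addresses out of an entry shaped like that; the sample data is a trimmed copy of the cached entry in the log.

network_info = [{
    "id": "ef820562-0de4-462d-a51d-13e4a4929719",
    "address": "fa:16:3e:eb:5b:7f",
    "network": {
        "label": "tempest-ServerActionsTestJSON-1864563188-network",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.6", "type": "fixed"}],
        }],
    },
}]

def fixed_ips(nw_info):
    """Yield (mac, ip, network label) for every fixed IP in the cache entry."""
    for vif in nw_info:
        label = vif["network"]["label"]
        for subnet in vif["network"]["subnets"]:
            for ip in subnet["ips"]:
                if ip["type"] == "fixed":
                    yield vif["address"], ip["address"], label

for mac, ip, label in fixed_ips(network_info):
    print(mac, ip, label)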
req-c65b6d5e-cf04-4285-af70-fc58a5cdd4a5 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Lock "76731b1b-af66-441b-8fe4-d5d7e7faf3ca-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1481.390071] env[63379]: INFO nova.compute.manager [None req-c65b6d5e-cf04-4285-af70-fc58a5cdd4a5 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Terminating instance [ 1481.394184] env[63379]: DEBUG nova.compute.manager [None req-c65b6d5e-cf04-4285-af70-fc58a5cdd4a5 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1481.394627] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c65b6d5e-cf04-4285-af70-fc58a5cdd4a5 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1481.395717] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b8103e5-b8bb-48e4-a0a6-b6602e63c914 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.407570] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c65b6d5e-cf04-4285-af70-fc58a5cdd4a5 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1481.407913] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7ec40a24-f920-45ec-bc58-a5d591fb4561 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.417800] env[63379]: DEBUG oslo_vmware.api [None req-c65b6d5e-cf04-4285-af70-fc58a5cdd4a5 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Waiting for the task: (returnval){ [ 1481.417800] env[63379]: value = "task-1779207" [ 1481.417800] env[63379]: _type = "Task" [ 1481.417800] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.428045] env[63379]: DEBUG oslo_vmware.api [None req-c65b6d5e-cf04-4285-af70-fc58a5cdd4a5 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': task-1779207, 'name': PowerOffVM_Task} progress is 0%. 
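The terminate path that starts above runs a fixed sequence: power the VM off, unregister it from vCenter, then delete its datastore directory, waiting for each task before the next. The schematic version below only captures that ordering; the three callables stand in for the corresponding vCenter tasks.

def destroy_instance(instance_id, power_off, unregister, delete_files, log=print):
    """Run the power-off -> unregister -> delete-files teardown in order,
    mirroring the PowerOffVM_Task / UnregisterVM / DeleteDatastoreFile_Task
    steps in the log. Each argument is a callable taking the instance id."""
    log(f'[instance: {instance_id}] Powering off the VM')
    power_off(instance_id)
    log(f'[instance: {instance_id}] Unregistering the VM')
    unregister(instance_id)
    log(f'[instance: {instance_id}] Deleting contents of the VM from datastore')
    delete_files(instance_id)
    log(f'[instance: {instance_id}] Instance destroyed')

noop = lambda _id: None
destroy_instance('76731b1b-af66-441b-8fe4-d5d7e7faf3ca', noop, noop, noop)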
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.450947] env[63379]: DEBUG oslo_concurrency.lockutils [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.710s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1481.451643] env[63379]: DEBUG nova.compute.manager [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1481.454366] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3175b1b6-4e9a-42e5-959a-d95942b5064f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 40.668s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1481.463650] env[63379]: DEBUG oslo_vmware.api [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1779206, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.615414} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.463974] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 90f0c97d-695b-4975-8ab9-4e77a9175da1/90f0c97d-695b-4975-8ab9-4e77a9175da1.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1481.465083] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1481.465083] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b3e55f0c-e38d-496d-a4ac-0eda70a4cdc6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.475796] env[63379]: DEBUG oslo_vmware.api [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1481.475796] env[63379]: value = "task-1779208" [ 1481.475796] env[63379]: _type = "Task" [ 1481.475796] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.487387] env[63379]: DEBUG oslo_vmware.api [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1779208, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.603716] env[63379]: DEBUG oslo_concurrency.lockutils [req-f1a175c6-3ceb-472b-b97c-1ea023c3a931 req-189c40d3-7462-44bb-ae04-90c0c4158301 service nova] Releasing lock "refresh_cache-90f0c97d-695b-4975-8ab9-4e77a9175da1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1481.603716] env[63379]: DEBUG nova.compute.manager [req-f1a175c6-3ceb-472b-b97c-1ea023c3a931 req-189c40d3-7462-44bb-ae04-90c0c4158301 service nova] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Received event network-vif-plugged-1c6c710f-163e-4747-8489-53e8fdf2cf1f {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1481.603716] env[63379]: DEBUG oslo_concurrency.lockutils [req-f1a175c6-3ceb-472b-b97c-1ea023c3a931 req-189c40d3-7462-44bb-ae04-90c0c4158301 service nova] Acquiring lock "04234ba7-24a3-48e5-9f62-6f4dddd0054a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1481.603716] env[63379]: DEBUG oslo_concurrency.lockutils [req-f1a175c6-3ceb-472b-b97c-1ea023c3a931 req-189c40d3-7462-44bb-ae04-90c0c4158301 service nova] Lock "04234ba7-24a3-48e5-9f62-6f4dddd0054a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1481.603716] env[63379]: DEBUG oslo_concurrency.lockutils [req-f1a175c6-3ceb-472b-b97c-1ea023c3a931 req-189c40d3-7462-44bb-ae04-90c0c4158301 service nova] Lock "04234ba7-24a3-48e5-9f62-6f4dddd0054a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1481.603716] env[63379]: DEBUG nova.compute.manager [req-f1a175c6-3ceb-472b-b97c-1ea023c3a931 req-189c40d3-7462-44bb-ae04-90c0c4158301 service nova] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] No waiting events found dispatching network-vif-plugged-1c6c710f-163e-4747-8489-53e8fdf2cf1f {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1481.603716] env[63379]: WARNING nova.compute.manager [req-f1a175c6-3ceb-472b-b97c-1ea023c3a931 req-189c40d3-7462-44bb-ae04-90c0c4158301 service nova] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Received unexpected event network-vif-plugged-1c6c710f-163e-4747-8489-53e8fdf2cf1f for instance with vm_state building and task_state spawning. [ 1481.800693] env[63379]: DEBUG nova.compute.manager [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1481.826557] env[63379]: DEBUG nova.virt.hardware [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1481.827145] env[63379]: DEBUG nova.virt.hardware [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1481.827145] env[63379]: DEBUG nova.virt.hardware [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1481.828191] env[63379]: DEBUG nova.virt.hardware [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1481.828589] env[63379]: DEBUG nova.virt.hardware [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1481.828819] env[63379]: DEBUG nova.virt.hardware [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1481.829278] env[63379]: DEBUG nova.virt.hardware [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1481.830862] env[63379]: DEBUG nova.virt.hardware [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1481.831691] env[63379]: DEBUG nova.virt.hardware [None 
req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1481.831993] env[63379]: DEBUG nova.virt.hardware [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1481.832261] env[63379]: DEBUG nova.virt.hardware [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1481.833353] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-001428ce-fe63-48ad-9ce8-606c182c248e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.838894] env[63379]: DEBUG nova.network.neutron [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Successfully created port: 73ef0a69-1fcf-4176-8fc8-5a95ef6add57 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1481.844580] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b65f3df-1d6c-4ef4-b627-902a1175f2bd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.879134] env[63379]: DEBUG nova.network.neutron [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Updating instance_info_cache with network_info: [{"id": "1c6c710f-163e-4747-8489-53e8fdf2cf1f", "address": "fa:16:3e:04:6f:f9", "network": {"id": "8f3138b9-b170-40da-aa17-d0938c48221d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2072680575-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "696eed8e898e4ffd831805df17a93d27", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c6c710f-16", "ovs_interfaceid": "1c6c710f-163e-4747-8489-53e8fdf2cf1f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1481.929183] env[63379]: DEBUG oslo_vmware.api [None req-c65b6d5e-cf04-4285-af70-fc58a5cdd4a5 
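The topology lines above reduce to: enumerate every (sockets, cores, threads) split whose product equals the vCPU count and which fits the flavor/image limits, then order them with any preferred values first. For this 1-vCPU flavor with no limits set, that leaves exactly one candidate, 1:1:1, as logged. The compact sketch below reproduces the enumeration step only, treating a limit of 0 as 'unset'.

from itertools import product

def possible_topologies(vcpus, max_sockets=0, max_cores=0, max_threads=0):
    """Return every (sockets, cores, threads) whose product is `vcpus`,
    honouring the per-dimension limits (0 = no limit)."""
    limit = lambda cap: cap if cap else vcpus
    out = []
    for s, c, t in product(range(1, limit(max_sockets) + 1),
                           range(1, limit(max_cores) + 1),
                           range(1, limit(max_threads) + 1)):
        if s * c * t == vcpus:
            out.append((s, c, t))
    return out

topologies = possible_topologies(vcpus=1)
print('Got %d possible topologies' % len(topologies))  # -> 1
print('Possible topologies', topologies)               # -> [(1, 1, 1)]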
tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': task-1779207, 'name': PowerOffVM_Task, 'duration_secs': 0.249862} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.929443] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c65b6d5e-cf04-4285-af70-fc58a5cdd4a5 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1481.929631] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c65b6d5e-cf04-4285-af70-fc58a5cdd4a5 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1481.930398] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-00c2a9e9-3066-42fb-a0eb-1452bd512107 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.959741] env[63379]: DEBUG nova.compute.utils [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1481.965323] env[63379]: DEBUG nova.compute.manager [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1481.965518] env[63379]: DEBUG nova.network.neutron [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1481.988556] env[63379]: DEBUG oslo_vmware.api [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1779208, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073914} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.991372] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1481.992304] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e125aa65-56f6-4d2c-9042-3971f016dcfd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.025980] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Reconfiguring VM instance instance-0000001f to attach disk [datastore1] 90f0c97d-695b-4975-8ab9-4e77a9175da1/90f0c97d-695b-4975-8ab9-4e77a9175da1.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1482.030358] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9a519d8e-2ac1-49e3-9878-5b6c1bbff088 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.045183] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c65b6d5e-cf04-4285-af70-fc58a5cdd4a5 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1482.045183] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c65b6d5e-cf04-4285-af70-fc58a5cdd4a5 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1482.045356] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-c65b6d5e-cf04-4285-af70-fc58a5cdd4a5 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Deleting the datastore file [datastore1] 76731b1b-af66-441b-8fe4-d5d7e7faf3ca {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1482.045779] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2d91a521-211a-48c7-8696-9945cebf526d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.054797] env[63379]: DEBUG oslo_vmware.api [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1482.054797] env[63379]: value = "task-1779211" [ 1482.054797] env[63379]: _type = "Task" [ 1482.054797] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1482.059024] env[63379]: DEBUG oslo_vmware.api [None req-c65b6d5e-cf04-4285-af70-fc58a5cdd4a5 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Waiting for the task: (returnval){ [ 1482.059024] env[63379]: value = "task-1779210" [ 1482.059024] env[63379]: _type = "Task" [ 1482.059024] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1482.074531] env[63379]: DEBUG oslo_vmware.api [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1779211, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.074869] env[63379]: DEBUG oslo_vmware.api [None req-c65b6d5e-cf04-4285-af70-fc58a5cdd4a5 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': task-1779210, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.168803] env[63379]: DEBUG nova.policy [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8784c329b3794ea1ba4cd2fbc8a2c155', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a2519cafe6c84b12b560995b2d3dd84d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1482.357132] env[63379]: DEBUG nova.compute.manager [req-71a127e5-2d35-4f84-b031-897436ac2475 req-5fe2543e-ad91-41da-b249-dee4bbbc8a46 service nova] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Received event network-changed-1c6c710f-163e-4747-8489-53e8fdf2cf1f {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1482.357132] env[63379]: DEBUG nova.compute.manager [req-71a127e5-2d35-4f84-b031-897436ac2475 req-5fe2543e-ad91-41da-b249-dee4bbbc8a46 service nova] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Refreshing instance network info cache due to event network-changed-1c6c710f-163e-4747-8489-53e8fdf2cf1f. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1482.357309] env[63379]: DEBUG oslo_concurrency.lockutils [req-71a127e5-2d35-4f84-b031-897436ac2475 req-5fe2543e-ad91-41da-b249-dee4bbbc8a46 service nova] Acquiring lock "refresh_cache-04234ba7-24a3-48e5-9f62-6f4dddd0054a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1482.383350] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Releasing lock "refresh_cache-04234ba7-24a3-48e5-9f62-6f4dddd0054a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1482.383592] env[63379]: DEBUG nova.compute.manager [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Instance network_info: |[{"id": "1c6c710f-163e-4747-8489-53e8fdf2cf1f", "address": "fa:16:3e:04:6f:f9", "network": {"id": "8f3138b9-b170-40da-aa17-d0938c48221d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2072680575-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "696eed8e898e4ffd831805df17a93d27", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c6c710f-16", "ovs_interfaceid": "1c6c710f-163e-4747-8489-53e8fdf2cf1f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1482.384968] env[63379]: DEBUG oslo_concurrency.lockutils [req-71a127e5-2d35-4f84-b031-897436ac2475 req-5fe2543e-ad91-41da-b249-dee4bbbc8a46 service nova] Acquired lock "refresh_cache-04234ba7-24a3-48e5-9f62-6f4dddd0054a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1482.384968] env[63379]: DEBUG nova.network.neutron [req-71a127e5-2d35-4f84-b031-897436ac2475 req-5fe2543e-ad91-41da-b249-dee4bbbc8a46 service nova] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Refreshing network info cache for port 1c6c710f-163e-4747-8489-53e8fdf2cf1f {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1482.386090] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:04:6f:f9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f880ac2e-d532-4f54-87bb-998a8d1bca78', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'1c6c710f-163e-4747-8489-53e8fdf2cf1f', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1482.394431] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Creating folder: Project (696eed8e898e4ffd831805df17a93d27). Parent ref: group-v369214. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1482.394750] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1c6f6744-430e-4045-8723-02a3b21fb1b7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.407495] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Created folder: Project (696eed8e898e4ffd831805df17a93d27) in parent group-v369214. [ 1482.407804] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Creating folder: Instances. Parent ref: group-v369312. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1482.408280] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-11e02c22-6ab3-4ac7-8f38-008ca46500c0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.423733] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Created folder: Instances in parent group-v369312. [ 1482.423733] env[63379]: DEBUG oslo.service.loopingcall [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1482.423733] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1482.423959] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c6d66ac6-a2d5-4d35-bb80-51e65b383d93 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.454733] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1482.454733] env[63379]: value = "task-1779214" [ 1482.454733] env[63379]: _type = "Task" [ 1482.454733] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1482.469523] env[63379]: DEBUG nova.compute.manager [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Start building block device mappings for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1482.472643] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779214, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.571609] env[63379]: DEBUG oslo_vmware.api [None req-c65b6d5e-cf04-4285-af70-fc58a5cdd4a5 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Task: {'id': task-1779210, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.180475} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1482.576203] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-c65b6d5e-cf04-4285-af70-fc58a5cdd4a5 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1482.576450] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c65b6d5e-cf04-4285-af70-fc58a5cdd4a5 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1482.576773] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c65b6d5e-cf04-4285-af70-fc58a5cdd4a5 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1482.577546] env[63379]: INFO nova.compute.manager [None req-c65b6d5e-cf04-4285-af70-fc58a5cdd4a5 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1482.577881] env[63379]: DEBUG oslo.service.loopingcall [None req-c65b6d5e-cf04-4285-af70-fc58a5cdd4a5 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1482.581223] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88d0beaf-4aa5-4d43-b353-f13e091a9254 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.581351] env[63379]: DEBUG oslo_vmware.api [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1779211, 'name': ReconfigVM_Task, 'duration_secs': 0.317744} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1482.581614] env[63379]: DEBUG nova.compute.manager [-] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1482.581711] env[63379]: DEBUG nova.network.neutron [-] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1482.583505] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Reconfigured VM instance instance-0000001f to attach disk [datastore1] 90f0c97d-695b-4975-8ab9-4e77a9175da1/90f0c97d-695b-4975-8ab9-4e77a9175da1.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1482.584674] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7f78305b-95d6-40cd-adab-3ceafb8ff004 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.592076] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bed9899f-791d-4013-a82f-807344bc253a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.597490] env[63379]: DEBUG oslo_vmware.api [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1482.597490] env[63379]: value = "task-1779215" [ 1482.597490] env[63379]: _type = "Task" [ 1482.597490] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1482.626418] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c1577e1-54c6-4875-bded-6bbe18449f2b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.633230] env[63379]: DEBUG oslo_vmware.api [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1779215, 'name': Rename_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.640402] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2acac04-971e-4df2-86ad-edaf741c20fa {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.660219] env[63379]: DEBUG nova.compute.provider_tree [None req-3175b1b6-4e9a-42e5-959a-d95942b5064f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1482.972635] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779214, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.116428] env[63379]: DEBUG oslo_vmware.api [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1779215, 'name': Rename_Task, 'duration_secs': 0.302007} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1483.116805] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1483.117087] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-00c8e732-3484-45be-b6a9-1fa3b536208c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.124965] env[63379]: DEBUG oslo_vmware.api [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1483.124965] env[63379]: value = "task-1779216" [ 1483.124965] env[63379]: _type = "Task" [ 1483.124965] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1483.135183] env[63379]: DEBUG oslo_vmware.api [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1779216, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.136947] env[63379]: DEBUG nova.network.neutron [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Successfully created port: 85bd2ccd-417b-4f6c-9e65-c41d8adb52d2 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1483.169047] env[63379]: DEBUG nova.scheduler.client.report [None req-3175b1b6-4e9a-42e5-959a-d95942b5064f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1483.212114] env[63379]: DEBUG oslo_vmware.rw_handles [None req-dc52c26e-6b1c-4125-abd8-66e30302a87f tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a19f1e-81f4-c8fd-606a-f45d45c04bf1/disk-0.vmdk. 
{{(pid=63379) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1483.213498] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71fd9e57-601a-4cd2-abda-0cddf86f1214 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.225371] env[63379]: DEBUG oslo_vmware.rw_handles [None req-dc52c26e-6b1c-4125-abd8-66e30302a87f tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a19f1e-81f4-c8fd-606a-f45d45c04bf1/disk-0.vmdk is in state: ready. {{(pid=63379) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1483.225371] env[63379]: ERROR oslo_vmware.rw_handles [None req-dc52c26e-6b1c-4125-abd8-66e30302a87f tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a19f1e-81f4-c8fd-606a-f45d45c04bf1/disk-0.vmdk due to incomplete transfer. [ 1483.225371] env[63379]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-9a81fb7b-b039-4505-9e51-47cd8916266d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.232737] env[63379]: DEBUG oslo_vmware.rw_handles [None req-dc52c26e-6b1c-4125-abd8-66e30302a87f tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a19f1e-81f4-c8fd-606a-f45d45c04bf1/disk-0.vmdk. {{(pid=63379) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1483.232989] env[63379]: DEBUG nova.virt.vmwareapi.images [None req-dc52c26e-6b1c-4125-abd8-66e30302a87f tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Uploaded image 8f042f3b-ea84-4947-9d5d-d9ea8c484f3e to the Glance image server {{(pid=63379) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1483.234809] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc52c26e-6b1c-4125-abd8-66e30302a87f tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Destroying the VM {{(pid=63379) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1483.235108] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-abcc2a86-bce2-45a3-8b97-f689c2a7d63e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.242247] env[63379]: DEBUG oslo_vmware.api [None req-dc52c26e-6b1c-4125-abd8-66e30302a87f tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1483.242247] env[63379]: value = "task-1779217" [ 1483.242247] env[63379]: _type = "Task" [ 1483.242247] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1483.252981] env[63379]: DEBUG oslo_vmware.api [None req-dc52c26e-6b1c-4125-abd8-66e30302a87f tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779217, 'name': Destroy_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.474604] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779214, 'name': CreateVM_Task, 'duration_secs': 0.567864} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1483.474773] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1483.475491] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1483.475618] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1483.475964] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1483.476246] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0bc43587-e0db-4845-87f4-99b8f4bd033e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.480725] env[63379]: DEBUG nova.compute.manager [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1483.484999] env[63379]: DEBUG oslo_vmware.api [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Waiting for the task: (returnval){ [ 1483.484999] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52390b84-73bd-f4a3-d840-4fe3700f81ce" [ 1483.484999] env[63379]: _type = "Task" [ 1483.484999] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1483.494842] env[63379]: DEBUG oslo_vmware.api [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52390b84-73bd-f4a3-d840-4fe3700f81ce, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.523558] env[63379]: DEBUG nova.virt.hardware [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1483.523885] env[63379]: DEBUG nova.virt.hardware [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1483.524069] env[63379]: DEBUG nova.virt.hardware [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1483.524264] env[63379]: DEBUG nova.virt.hardware [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1483.524420] env[63379]: DEBUG nova.virt.hardware [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1483.524578] env[63379]: DEBUG nova.virt.hardware [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1483.524831] env[63379]: DEBUG nova.virt.hardware [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1483.525010] env[63379]: DEBUG nova.virt.hardware [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1483.525210] env[63379]: DEBUG nova.virt.hardware [None 
req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1483.525384] env[63379]: DEBUG nova.virt.hardware [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1483.525563] env[63379]: DEBUG nova.virt.hardware [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1483.527051] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64f35150-7da7-4284-a528-22f95ea0d289 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.538525] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99572cf6-7587-4097-aa15-02cbd7fee813 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.562793] env[63379]: DEBUG nova.compute.manager [req-0396f2cd-b3ff-45f3-b9c1-8ddfef10b8ca req-8eec5f6a-d07e-48e3-8f32-db63d494c773 service nova] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Received event network-vif-deleted-3c1937ec-1f32-4f60-909d-3726888392ea {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1483.563511] env[63379]: INFO nova.compute.manager [req-0396f2cd-b3ff-45f3-b9c1-8ddfef10b8ca req-8eec5f6a-d07e-48e3-8f32-db63d494c773 service nova] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Neutron deleted interface 3c1937ec-1f32-4f60-909d-3726888392ea; detaching it from the instance and deleting it from the info cache [ 1483.563876] env[63379]: DEBUG nova.network.neutron [req-0396f2cd-b3ff-45f3-b9c1-8ddfef10b8ca req-8eec5f6a-d07e-48e3-8f32-db63d494c773 service nova] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1483.580318] env[63379]: DEBUG nova.network.neutron [-] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1483.632348] env[63379]: DEBUG nova.network.neutron [req-71a127e5-2d35-4f84-b031-897436ac2475 req-5fe2543e-ad91-41da-b249-dee4bbbc8a46 service nova] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Updated VIF entry in instance network info cache for port 1c6c710f-163e-4747-8489-53e8fdf2cf1f. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1483.632778] env[63379]: DEBUG nova.network.neutron [req-71a127e5-2d35-4f84-b031-897436ac2475 req-5fe2543e-ad91-41da-b249-dee4bbbc8a46 service nova] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Updating instance_info_cache with network_info: [{"id": "1c6c710f-163e-4747-8489-53e8fdf2cf1f", "address": "fa:16:3e:04:6f:f9", "network": {"id": "8f3138b9-b170-40da-aa17-d0938c48221d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2072680575-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "696eed8e898e4ffd831805df17a93d27", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c6c710f-16", "ovs_interfaceid": "1c6c710f-163e-4747-8489-53e8fdf2cf1f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1483.637366] env[63379]: DEBUG oslo_vmware.api [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1779216, 'name': PowerOnVM_Task, 'duration_secs': 0.48956} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1483.637899] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1483.638121] env[63379]: INFO nova.compute.manager [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Took 8.32 seconds to spawn the instance on the hypervisor. 
[ 1483.638301] env[63379]: DEBUG nova.compute.manager [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1483.639200] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a8cc5f7-fba3-44f5-8904-012565ead8b6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.756290] env[63379]: DEBUG oslo_vmware.api [None req-dc52c26e-6b1c-4125-abd8-66e30302a87f tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779217, 'name': Destroy_Task, 'duration_secs': 0.375581} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1483.756584] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-dc52c26e-6b1c-4125-abd8-66e30302a87f tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Destroyed the VM [ 1483.756827] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-dc52c26e-6b1c-4125-abd8-66e30302a87f tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Deleting Snapshot of the VM instance {{(pid=63379) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1483.757333] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-46f0589b-80bd-4596-808f-0945c4a1e4c5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.765370] env[63379]: DEBUG oslo_vmware.api [None req-dc52c26e-6b1c-4125-abd8-66e30302a87f tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1483.765370] env[63379]: value = "task-1779218" [ 1483.765370] env[63379]: _type = "Task" [ 1483.765370] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1483.776072] env[63379]: DEBUG oslo_vmware.api [None req-dc52c26e-6b1c-4125-abd8-66e30302a87f tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779218, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.781430] env[63379]: DEBUG oslo_concurrency.lockutils [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Acquiring lock "5aa36799-251b-4933-8ccd-8125995b1f8b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1483.781512] env[63379]: DEBUG oslo_concurrency.lockutils [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Lock "5aa36799-251b-4933-8ccd-8125995b1f8b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1483.896371] env[63379]: DEBUG nova.network.neutron [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Successfully created port: 0364d0f7-f24e-4ee3-aead-cb4a79933b69 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1483.999270] env[63379]: DEBUG oslo_vmware.api [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52390b84-73bd-f4a3-d840-4fe3700f81ce, 'name': SearchDatastore_Task, 'duration_secs': 0.011468} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1483.999499] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1483.999981] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1484.000180] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1484.000268] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1484.000715] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1484.000879] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-53b5af32-afee-4009-8bed-472d5841cf8c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.011124] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1484.011204] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1484.012135] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4b341d5-1aca-41a0-ab64-a35cf8628b92 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.020282] env[63379]: DEBUG oslo_vmware.api [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Waiting for the task: (returnval){ [ 1484.020282] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52da56a2-0ced-4fde-a98d-f4c6cd2e47dc" [ 1484.020282] env[63379]: _type = "Task" [ 1484.020282] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1484.031694] env[63379]: DEBUG oslo_vmware.api [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52da56a2-0ced-4fde-a98d-f4c6cd2e47dc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.066905] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5ae792aa-186a-45c1-8a24-a70e1f3c3fab {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.078538] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dc2e558-e6d6-4e56-a7c3-5ac5c0e9e055 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.095553] env[63379]: INFO nova.compute.manager [-] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Took 1.51 seconds to deallocate network for instance. [ 1484.125686] env[63379]: DEBUG nova.compute.manager [req-0396f2cd-b3ff-45f3-b9c1-8ddfef10b8ca req-8eec5f6a-d07e-48e3-8f32-db63d494c773 service nova] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Detach interface failed, port_id=3c1937ec-1f32-4f60-909d-3726888392ea, reason: Instance 76731b1b-af66-441b-8fe4-d5d7e7faf3ca could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 1484.138782] env[63379]: DEBUG oslo_concurrency.lockutils [req-71a127e5-2d35-4f84-b031-897436ac2475 req-5fe2543e-ad91-41da-b249-dee4bbbc8a46 service nova] Releasing lock "refresh_cache-04234ba7-24a3-48e5-9f62-6f4dddd0054a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1484.167321] env[63379]: INFO nova.compute.manager [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Took 54.43 seconds to build instance. 
[ 1484.182945] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3175b1b6-4e9a-42e5-959a-d95942b5064f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.728s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1484.186993] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 42.566s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1484.188483] env[63379]: INFO nova.compute.claims [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1484.281141] env[63379]: DEBUG oslo_vmware.api [None req-dc52c26e-6b1c-4125-abd8-66e30302a87f tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779218, 'name': RemoveSnapshot_Task, 'duration_secs': 0.510191} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1484.281861] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-dc52c26e-6b1c-4125-abd8-66e30302a87f tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Deleted Snapshot of the VM instance {{(pid=63379) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1484.282194] env[63379]: INFO nova.compute.manager [None req-dc52c26e-6b1c-4125-abd8-66e30302a87f tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Took 14.66 seconds to snapshot the instance on the hypervisor. [ 1484.545715] env[63379]: DEBUG oslo_vmware.api [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52da56a2-0ced-4fde-a98d-f4c6cd2e47dc, 'name': SearchDatastore_Task, 'duration_secs': 0.010522} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1484.546619] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e79daf5-1963-4f3a-a908-0a40040daba2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.557817] env[63379]: DEBUG oslo_vmware.api [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Waiting for the task: (returnval){ [ 1484.557817] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d8239e-2902-9bbe-5141-05a415607c49" [ 1484.557817] env[63379]: _type = "Task" [ 1484.557817] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1484.563209] env[63379]: DEBUG nova.network.neutron [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Successfully updated port: 73ef0a69-1fcf-4176-8fc8-5a95ef6add57 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1484.571190] env[63379]: DEBUG oslo_vmware.api [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d8239e-2902-9bbe-5141-05a415607c49, 'name': SearchDatastore_Task, 'duration_secs': 0.010455} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1484.571477] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1484.571733] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 04234ba7-24a3-48e5-9f62-6f4dddd0054a/04234ba7-24a3-48e5-9f62-6f4dddd0054a.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1484.571998] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-30f03523-d1fd-455d-9b44-ae5979ef27bf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.582076] env[63379]: DEBUG oslo_vmware.api [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Waiting for the task: (returnval){ [ 1484.582076] env[63379]: value = "task-1779219" [ 1484.582076] env[63379]: _type = "Task" [ 1484.582076] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1484.595792] env[63379]: DEBUG oslo_vmware.api [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': task-1779219, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.604492] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Acquiring lock "158fe346-93f5-422b-877a-8423547da58f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1484.604765] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Lock "158fe346-93f5-422b-877a-8423547da58f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1484.607495] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c65b6d5e-cf04-4285-af70-fc58a5cdd4a5 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1484.649249] env[63379]: DEBUG nova.compute.manager [req-80af7c3a-c4fe-46ef-bad7-034c5ae1e37e req-fcaed8f4-e09e-4f7f-be75-b063050438d2 service nova] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Received event network-vif-plugged-73ef0a69-1fcf-4176-8fc8-5a95ef6add57 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1484.649488] env[63379]: DEBUG oslo_concurrency.lockutils [req-80af7c3a-c4fe-46ef-bad7-034c5ae1e37e req-fcaed8f4-e09e-4f7f-be75-b063050438d2 service nova] Acquiring lock "bc7baa1a-f65d-41d4-ad86-de041fbb2306-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1484.649700] env[63379]: DEBUG oslo_concurrency.lockutils [req-80af7c3a-c4fe-46ef-bad7-034c5ae1e37e req-fcaed8f4-e09e-4f7f-be75-b063050438d2 service nova] Lock "bc7baa1a-f65d-41d4-ad86-de041fbb2306-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1484.649871] env[63379]: DEBUG oslo_concurrency.lockutils [req-80af7c3a-c4fe-46ef-bad7-034c5ae1e37e req-fcaed8f4-e09e-4f7f-be75-b063050438d2 service nova] Lock "bc7baa1a-f65d-41d4-ad86-de041fbb2306-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1484.650053] env[63379]: DEBUG nova.compute.manager [req-80af7c3a-c4fe-46ef-bad7-034c5ae1e37e req-fcaed8f4-e09e-4f7f-be75-b063050438d2 service nova] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] No waiting events found dispatching network-vif-plugged-73ef0a69-1fcf-4176-8fc8-5a95ef6add57 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1484.650221] env[63379]: WARNING nova.compute.manager [req-80af7c3a-c4fe-46ef-bad7-034c5ae1e37e 
req-fcaed8f4-e09e-4f7f-be75-b063050438d2 service nova] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Received unexpected event network-vif-plugged-73ef0a69-1fcf-4176-8fc8-5a95ef6add57 for instance with vm_state building and task_state spawning. [ 1484.671014] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6b468f1e-7338-4565-ab83-220e4eee1afb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "90f0c97d-695b-4975-8ab9-4e77a9175da1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.170s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1484.758947] env[63379]: INFO nova.scheduler.client.report [None req-3175b1b6-4e9a-42e5-959a-d95942b5064f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Deleted allocation for migration 786df903-0cb0-4f51-a75d-824fa35e0a15 [ 1485.066473] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Acquiring lock "refresh_cache-bc7baa1a-f65d-41d4-ad86-de041fbb2306" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1485.066878] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Acquired lock "refresh_cache-bc7baa1a-f65d-41d4-ad86-de041fbb2306" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1485.066878] env[63379]: DEBUG nova.network.neutron [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1485.097145] env[63379]: DEBUG oslo_vmware.api [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': task-1779219, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.514932} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.097784] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 04234ba7-24a3-48e5-9f62-6f4dddd0054a/04234ba7-24a3-48e5-9f62-6f4dddd0054a.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1485.098353] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1485.098791] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ce7c91ff-5127-4f3c-8a48-e1e88f52beb9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.106840] env[63379]: DEBUG oslo_vmware.api [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Waiting for the task: (returnval){ [ 1485.106840] env[63379]: value = "task-1779220" [ 1485.106840] env[63379]: _type = "Task" [ 1485.106840] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.120132] env[63379]: DEBUG oslo_vmware.api [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': task-1779220, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.173491] env[63379]: DEBUG nova.compute.manager [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1485.267204] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3175b1b6-4e9a-42e5-959a-d95942b5064f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Lock "aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 48.088s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1485.620750] env[63379]: DEBUG oslo_vmware.api [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': task-1779220, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069517} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.620750] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1485.623418] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd171a36-4b7c-4dc8-8832-15d965a6048b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.629109] env[63379]: DEBUG nova.network.neutron [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1485.631869] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e047eba5-5831-4ef9-975e-1af10c587cf4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquiring lock "48c0d20e-adc4-40a9-888c-ffea363f6edb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1485.632233] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e047eba5-5831-4ef9-975e-1af10c587cf4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "48c0d20e-adc4-40a9-888c-ffea363f6edb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1485.632462] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e047eba5-5831-4ef9-975e-1af10c587cf4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquiring lock "48c0d20e-adc4-40a9-888c-ffea363f6edb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1485.632678] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e047eba5-5831-4ef9-975e-1af10c587cf4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "48c0d20e-adc4-40a9-888c-ffea363f6edb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1485.632839] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e047eba5-5831-4ef9-975e-1af10c587cf4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "48c0d20e-adc4-40a9-888c-ffea363f6edb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1485.653961] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 
04234ba7-24a3-48e5-9f62-6f4dddd0054a] Reconfiguring VM instance instance-0000001e to attach disk [datastore1] 04234ba7-24a3-48e5-9f62-6f4dddd0054a/04234ba7-24a3-48e5-9f62-6f4dddd0054a.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1485.656941] env[63379]: INFO nova.compute.manager [None req-e047eba5-5831-4ef9-975e-1af10c587cf4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Terminating instance [ 1485.661585] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1224686f-9a12-4194-8ff2-9d5fbe05787a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.679586] env[63379]: DEBUG nova.compute.manager [None req-e047eba5-5831-4ef9-975e-1af10c587cf4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1485.679749] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e047eba5-5831-4ef9-975e-1af10c587cf4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1485.685899] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d92735b-8b21-4bb3-93ff-ddcfc1a79d59 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.702707] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e047eba5-5831-4ef9-975e-1af10c587cf4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1485.704686] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-931cda93-e4a6-4d39-a52a-ead8fd3c6347 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.707036] env[63379]: DEBUG oslo_vmware.api [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Waiting for the task: (returnval){ [ 1485.707036] env[63379]: value = "task-1779221" [ 1485.707036] env[63379]: _type = "Task" [ 1485.707036] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.707963] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1485.719742] env[63379]: DEBUG oslo_vmware.api [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': task-1779221, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.797048] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e047eba5-5831-4ef9-975e-1af10c587cf4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1485.797048] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e047eba5-5831-4ef9-975e-1af10c587cf4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1485.797048] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-e047eba5-5831-4ef9-975e-1af10c587cf4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Deleting the datastore file [datastore1] 48c0d20e-adc4-40a9-888c-ffea363f6edb {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1485.797048] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ceda8da0-2a9b-42dd-ad74-2d4937ed8d10 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.803191] env[63379]: DEBUG oslo_vmware.api [None req-e047eba5-5831-4ef9-975e-1af10c587cf4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1485.803191] env[63379]: value = "task-1779223" [ 1485.803191] env[63379]: _type = "Task" [ 1485.803191] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.808452] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa97288c-ba59-41b8-8168-745535cdeec5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.817849] env[63379]: DEBUG oslo_vmware.api [None req-e047eba5-5831-4ef9-975e-1af10c587cf4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779223, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.820864] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75ddec7c-70a3-4ec7-9f92-0ef6689c55d6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.859554] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-319cfc69-557d-4a03-9e2b-c767d5c41933 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.869853] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3134c477-deae-4075-a209-afc6038bb549 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.885792] env[63379]: DEBUG nova.compute.provider_tree [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1486.191162] env[63379]: DEBUG nova.network.neutron [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Updating instance_info_cache with network_info: [{"id": "73ef0a69-1fcf-4176-8fc8-5a95ef6add57", "address": "fa:16:3e:0c:8e:df", "network": {"id": "bf906e1c-2386-4e0c-b286-5c6870a9bc92", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1191943808-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee7362bde75a4a5aa7a24a09402ad153", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f52a458-d157-48a3-b4e2-b8cc0779afe2", "external-id": "nsx-vlan-transportzone-403", "segmentation_id": 403, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap73ef0a69-1f", "ovs_interfaceid": "73ef0a69-1fcf-4176-8fc8-5a95ef6add57", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1486.227311] env[63379]: DEBUG oslo_vmware.api [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': task-1779221, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.252818] env[63379]: DEBUG nova.network.neutron [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Successfully updated port: 85bd2ccd-417b-4f6c-9e65-c41d8adb52d2 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1486.314827] env[63379]: DEBUG oslo_vmware.api [None req-e047eba5-5831-4ef9-975e-1af10c587cf4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779223, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.15802} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.315128] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-e047eba5-5831-4ef9-975e-1af10c587cf4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1486.315322] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e047eba5-5831-4ef9-975e-1af10c587cf4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1486.315502] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e047eba5-5831-4ef9-975e-1af10c587cf4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1486.315679] env[63379]: INFO nova.compute.manager [None req-e047eba5-5831-4ef9-975e-1af10c587cf4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Took 0.64 seconds to destroy the instance on the hypervisor. [ 1486.315947] env[63379]: DEBUG oslo.service.loopingcall [None req-e047eba5-5831-4ef9-975e-1af10c587cf4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1486.316168] env[63379]: DEBUG nova.compute.manager [-] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1486.316682] env[63379]: DEBUG nova.network.neutron [-] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1486.388672] env[63379]: DEBUG nova.scheduler.client.report [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1486.515194] env[63379]: DEBUG oslo_concurrency.lockutils [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquiring lock "f10fe64d-a09e-488a-b609-3e38922cf2e0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1486.515438] env[63379]: DEBUG oslo_concurrency.lockutils [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "f10fe64d-a09e-488a-b609-3e38922cf2e0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1486.699416] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Releasing lock "refresh_cache-bc7baa1a-f65d-41d4-ad86-de041fbb2306" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1486.699416] env[63379]: DEBUG nova.compute.manager [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Instance network_info: |[{"id": "73ef0a69-1fcf-4176-8fc8-5a95ef6add57", "address": "fa:16:3e:0c:8e:df", "network": {"id": "bf906e1c-2386-4e0c-b286-5c6870a9bc92", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1191943808-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee7362bde75a4a5aa7a24a09402ad153", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": 
"ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f52a458-d157-48a3-b4e2-b8cc0779afe2", "external-id": "nsx-vlan-transportzone-403", "segmentation_id": 403, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap73ef0a69-1f", "ovs_interfaceid": "73ef0a69-1fcf-4176-8fc8-5a95ef6add57", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1486.699416] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0c:8e:df', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9f52a458-d157-48a3-b4e2-b8cc0779afe2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '73ef0a69-1fcf-4176-8fc8-5a95ef6add57', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1486.706576] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Creating folder: Project (ee7362bde75a4a5aa7a24a09402ad153). Parent ref: group-v369214. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1486.707129] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5f7d2817-fad5-41c4-9bf0-f7576bfd3184 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.721670] env[63379]: DEBUG oslo_vmware.api [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': task-1779221, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.723523] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Created folder: Project (ee7362bde75a4a5aa7a24a09402ad153) in parent group-v369214. [ 1486.726113] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Creating folder: Instances. Parent ref: group-v369315. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1486.726113] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-36b56262-4687-495c-83c8-28efbe519f22 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.736412] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Created folder: Instances in parent group-v369315. 
[ 1486.736412] env[63379]: DEBUG oslo.service.loopingcall [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1486.736553] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1486.736747] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e84561fb-ae33-40a0-a395-631ae4f1ca2e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.757925] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1486.757925] env[63379]: value = "task-1779226" [ 1486.757925] env[63379]: _type = "Task" [ 1486.757925] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.768383] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779226, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.859148] env[63379]: DEBUG nova.compute.manager [req-1b80ef0f-980a-45bb-bc40-6bfcd02e62e5 req-ce763775-f826-48da-a36d-8b8bf11338a6 service nova] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Received event network-changed-73ef0a69-1fcf-4176-8fc8-5a95ef6add57 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1486.859354] env[63379]: DEBUG nova.compute.manager [req-1b80ef0f-980a-45bb-bc40-6bfcd02e62e5 req-ce763775-f826-48da-a36d-8b8bf11338a6 service nova] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Refreshing instance network info cache due to event network-changed-73ef0a69-1fcf-4176-8fc8-5a95ef6add57. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1486.859659] env[63379]: DEBUG oslo_concurrency.lockutils [req-1b80ef0f-980a-45bb-bc40-6bfcd02e62e5 req-ce763775-f826-48da-a36d-8b8bf11338a6 service nova] Acquiring lock "refresh_cache-bc7baa1a-f65d-41d4-ad86-de041fbb2306" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1486.859783] env[63379]: DEBUG oslo_concurrency.lockutils [req-1b80ef0f-980a-45bb-bc40-6bfcd02e62e5 req-ce763775-f826-48da-a36d-8b8bf11338a6 service nova] Acquired lock "refresh_cache-bc7baa1a-f65d-41d4-ad86-de041fbb2306" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1486.859890] env[63379]: DEBUG nova.network.neutron [req-1b80ef0f-980a-45bb-bc40-6bfcd02e62e5 req-ce763775-f826-48da-a36d-8b8bf11338a6 service nova] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Refreshing network info cache for port 73ef0a69-1fcf-4176-8fc8-5a95ef6add57 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1486.894409] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.707s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1486.894794] env[63379]: DEBUG nova.compute.manager [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1486.897331] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2f5d20de-5285-451f-ab9b-c45cd36f476c tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 41.468s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1486.897508] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2f5d20de-5285-451f-ab9b-c45cd36f476c tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1486.899592] env[63379]: DEBUG oslo_concurrency.lockutils [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 40.358s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1486.900985] env[63379]: INFO nova.compute.claims [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1486.927474] env[63379]: INFO nova.scheduler.client.report [None req-2f5d20de-5285-451f-ab9b-c45cd36f476c tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Deleted allocations for instance 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9 [ 1487.225746] env[63379]: DEBUG oslo_vmware.api [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': task-1779221, 'name': ReconfigVM_Task, 'duration_secs': 1.329717} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1487.226252] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Reconfigured VM instance instance-0000001e to attach disk [datastore1] 04234ba7-24a3-48e5-9f62-6f4dddd0054a/04234ba7-24a3-48e5-9f62-6f4dddd0054a.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1487.229359] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a61b3dfe-9fd4-4b4e-a39e-c7bb75d3bf40 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.234680] env[63379]: DEBUG oslo_vmware.api [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Waiting for the task: (returnval){ [ 1487.234680] env[63379]: value = "task-1779227" [ 1487.234680] env[63379]: _type = "Task" [ 1487.234680] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1487.249136] env[63379]: DEBUG oslo_vmware.api [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': task-1779227, 'name': Rename_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.271884] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779226, 'name': CreateVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.327633] env[63379]: DEBUG nova.network.neutron [-] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1487.405492] env[63379]: DEBUG nova.compute.utils [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1487.410905] env[63379]: DEBUG nova.compute.manager [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1487.410905] env[63379]: DEBUG nova.network.neutron [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1487.437437] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2f5d20de-5285-451f-ab9b-c45cd36f476c tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Lock "8a7a3a54-ca4f-4860-a976-7d6b1212b9c9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.581s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1487.465920] env[63379]: DEBUG nova.policy [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '09fe654a390b4ac49b07f295801a6695', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5a35cb189afd489591fe9d6e85640d4a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1487.746143] env[63379]: DEBUG oslo_vmware.api [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': task-1779227, 'name': Rename_Task, 'duration_secs': 0.157719} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1487.746143] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1487.746143] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1d565c89-a24b-4ee9-9dda-302bc7beeff4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.753161] env[63379]: DEBUG oslo_vmware.api [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Waiting for the task: (returnval){ [ 1487.753161] env[63379]: value = "task-1779228" [ 1487.753161] env[63379]: _type = "Task" [ 1487.753161] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1487.761541] env[63379]: DEBUG oslo_vmware.api [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': task-1779228, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.770450] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779226, 'name': CreateVM_Task, 'duration_secs': 0.564648} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1487.770650] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1487.772560] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1487.772889] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1487.774232] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1487.774232] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-179c1158-958d-4f28-9c4c-37b8951c2bcf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.778422] env[63379]: DEBUG 
nova.network.neutron [req-1b80ef0f-980a-45bb-bc40-6bfcd02e62e5 req-ce763775-f826-48da-a36d-8b8bf11338a6 service nova] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Updated VIF entry in instance network info cache for port 73ef0a69-1fcf-4176-8fc8-5a95ef6add57. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1487.778506] env[63379]: DEBUG nova.network.neutron [req-1b80ef0f-980a-45bb-bc40-6bfcd02e62e5 req-ce763775-f826-48da-a36d-8b8bf11338a6 service nova] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Updating instance_info_cache with network_info: [{"id": "73ef0a69-1fcf-4176-8fc8-5a95ef6add57", "address": "fa:16:3e:0c:8e:df", "network": {"id": "bf906e1c-2386-4e0c-b286-5c6870a9bc92", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1191943808-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee7362bde75a4a5aa7a24a09402ad153", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f52a458-d157-48a3-b4e2-b8cc0779afe2", "external-id": "nsx-vlan-transportzone-403", "segmentation_id": 403, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap73ef0a69-1f", "ovs_interfaceid": "73ef0a69-1fcf-4176-8fc8-5a95ef6add57", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1487.784472] env[63379]: DEBUG oslo_vmware.api [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Waiting for the task: (returnval){ [ 1487.784472] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5263fd9e-d1f2-d272-04e8-32d911040370" [ 1487.784472] env[63379]: _type = "Task" [ 1487.784472] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1487.795590] env[63379]: DEBUG oslo_vmware.api [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5263fd9e-d1f2-d272-04e8-32d911040370, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.831921] env[63379]: INFO nova.compute.manager [-] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Took 1.52 seconds to deallocate network for instance. 
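[annotation] The ReconfigVM_Task, CreateVM_Task, SearchDatastore_Task and DeleteDatastoreFile_Task lines above all follow the same shape: vCenter hands back a task reference immediately, and the caller polls it until it reports success or error, logging "progress is N%" along the way and a duration_secs on completion. The sketch below mirrors only that polling structure; the `get_task_info` callable is a hypothetical stand-in for the property-collector lookup oslo.vmware performs, not its actual implementation.

```python
import time

# Hypothetical stand-in structure for the vCenter task polling seen in the log.
# get_task_info(task_id) is assumed to return a dict such as
#   {'state': 'running', 'progress': 14}
#   {'state': 'success'}
#   {'state': 'error', 'message': '...'}


class TaskFailed(Exception):
    """Raised when the polled task ends in the error state."""


def wait_for_task(get_task_info, task_id, poll_interval=0.5):
    """Poll a vCenter-style task until it succeeds or errors."""
    start = time.monotonic()
    while True:
        info = get_task_info(task_id)
        state = info.get('state')
        if state == 'success':
            # mirrors "... completed successfully" with a duration_secs field
            info['duration_secs'] = round(time.monotonic() - start, 6)
            return info
        if state == 'error':
            raise TaskFailed(info.get('message', 'unknown error'))
        # mirrors the "_poll_task ... progress is N%" lines while the task runs
        print(f"Task {task_id} progress is {info.get('progress', 0)}%")
        time.sleep(poll_interval)
```

[annotation] In the log this loop surfaces as the `_poll_task` entries: task-1779221 (ReconfigVM_Task) is seen at 14%, then 99%, and finally completes with duration_secs 1.329717 before the disk is reported as attached.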
[ 1487.847702] env[63379]: DEBUG oslo_concurrency.lockutils [None req-03cd40a0-90f0-493c-9e31-d8dd898eb7fe tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Acquiring lock "55fb6899-0321-4bf2-bf3f-2e87dd479433" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1487.847702] env[63379]: DEBUG oslo_concurrency.lockutils [None req-03cd40a0-90f0-493c-9e31-d8dd898eb7fe tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Lock "55fb6899-0321-4bf2-bf3f-2e87dd479433" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1487.847702] env[63379]: DEBUG oslo_concurrency.lockutils [None req-03cd40a0-90f0-493c-9e31-d8dd898eb7fe tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Acquiring lock "55fb6899-0321-4bf2-bf3f-2e87dd479433-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1487.847702] env[63379]: DEBUG oslo_concurrency.lockutils [None req-03cd40a0-90f0-493c-9e31-d8dd898eb7fe tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Lock "55fb6899-0321-4bf2-bf3f-2e87dd479433-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1487.847702] env[63379]: DEBUG oslo_concurrency.lockutils [None req-03cd40a0-90f0-493c-9e31-d8dd898eb7fe tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Lock "55fb6899-0321-4bf2-bf3f-2e87dd479433-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1487.856344] env[63379]: INFO nova.compute.manager [None req-03cd40a0-90f0-493c-9e31-d8dd898eb7fe tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Terminating instance [ 1487.861610] env[63379]: DEBUG nova.compute.manager [None req-03cd40a0-90f0-493c-9e31-d8dd898eb7fe tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1487.861610] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-03cd40a0-90f0-493c-9e31-d8dd898eb7fe tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1487.865156] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-527d0496-1136-4342-b6ec-a87f1202f24f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.878634] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-03cd40a0-90f0-493c-9e31-d8dd898eb7fe tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1487.878634] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6adf3568-510c-4bbe-af39-612d17fd5b7e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.887432] env[63379]: DEBUG oslo_vmware.api [None req-03cd40a0-90f0-493c-9e31-d8dd898eb7fe tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Waiting for the task: (returnval){ [ 1487.887432] env[63379]: value = "task-1779229" [ 1487.887432] env[63379]: _type = "Task" [ 1487.887432] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1487.897994] env[63379]: DEBUG oslo_vmware.api [None req-03cd40a0-90f0-493c-9e31-d8dd898eb7fe tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Task: {'id': task-1779229, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.915623] env[63379]: DEBUG nova.compute.manager [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1487.974565] env[63379]: DEBUG nova.network.neutron [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Successfully created port: 5c6da110-b3ed-4065-94b0-004b98fd1363 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1488.269430] env[63379]: DEBUG oslo_vmware.api [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': task-1779228, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.289083] env[63379]: DEBUG oslo_concurrency.lockutils [req-1b80ef0f-980a-45bb-bc40-6bfcd02e62e5 req-ce763775-f826-48da-a36d-8b8bf11338a6 service nova] Releasing lock "refresh_cache-bc7baa1a-f65d-41d4-ad86-de041fbb2306" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1488.289362] env[63379]: DEBUG nova.compute.manager [req-1b80ef0f-980a-45bb-bc40-6bfcd02e62e5 req-ce763775-f826-48da-a36d-8b8bf11338a6 service nova] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Received event network-changed-ef820562-0de4-462d-a51d-13e4a4929719 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1488.289534] env[63379]: DEBUG nova.compute.manager [req-1b80ef0f-980a-45bb-bc40-6bfcd02e62e5 req-ce763775-f826-48da-a36d-8b8bf11338a6 service nova] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Refreshing instance network info cache due to event network-changed-ef820562-0de4-462d-a51d-13e4a4929719. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1488.289744] env[63379]: DEBUG oslo_concurrency.lockutils [req-1b80ef0f-980a-45bb-bc40-6bfcd02e62e5 req-ce763775-f826-48da-a36d-8b8bf11338a6 service nova] Acquiring lock "refresh_cache-90f0c97d-695b-4975-8ab9-4e77a9175da1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1488.289891] env[63379]: DEBUG oslo_concurrency.lockutils [req-1b80ef0f-980a-45bb-bc40-6bfcd02e62e5 req-ce763775-f826-48da-a36d-8b8bf11338a6 service nova] Acquired lock "refresh_cache-90f0c97d-695b-4975-8ab9-4e77a9175da1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1488.290067] env[63379]: DEBUG nova.network.neutron [req-1b80ef0f-980a-45bb-bc40-6bfcd02e62e5 req-ce763775-f826-48da-a36d-8b8bf11338a6 service nova] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Refreshing network info cache for port ef820562-0de4-462d-a51d-13e4a4929719 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1488.305022] env[63379]: DEBUG oslo_vmware.api [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5263fd9e-d1f2-d272-04e8-32d911040370, 'name': SearchDatastore_Task, 'duration_secs': 0.01386} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.305804] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1488.306072] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1488.306317] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1488.306468] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1488.306649] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1488.309945] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-204e4fd5-5ccd-41d9-9d4c-a3c30cda820c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.323020] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1488.323020] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1488.323872] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94ebac74-2230-4a99-a6f6-6bc4434b7374 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.336494] env[63379]: DEBUG oslo_vmware.api [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Waiting for the task: (returnval){ [ 1488.336494] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52876e56-974e-cf38-daf3-694a8d703307" [ 1488.336494] env[63379]: _type = "Task" [ 1488.336494] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.341298] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e047eba5-5831-4ef9-975e-1af10c587cf4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1488.348536] env[63379]: DEBUG oslo_vmware.api [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52876e56-974e-cf38-daf3-694a8d703307, 'name': SearchDatastore_Task, 'duration_secs': 0.010351} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.353851] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d57e1434-50cc-406f-a20b-e8a67d30cc58 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.358645] env[63379]: DEBUG oslo_vmware.api [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Waiting for the task: (returnval){ [ 1488.358645] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]527f6a25-f91b-ad48-2189-748e25a19ceb" [ 1488.358645] env[63379]: _type = "Task" [ 1488.358645] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.374432] env[63379]: DEBUG oslo_vmware.api [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]527f6a25-f91b-ad48-2189-748e25a19ceb, 'name': SearchDatastore_Task, 'duration_secs': 0.009148} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.374872] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1488.374977] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] bc7baa1a-f65d-41d4-ad86-de041fbb2306/bc7baa1a-f65d-41d4-ad86-de041fbb2306.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1488.375263] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-455c004a-6bb4-42d2-baf7-d7ea2e29450f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.383257] env[63379]: DEBUG oslo_vmware.api [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Waiting for the task: (returnval){ [ 1488.383257] env[63379]: value = "task-1779230" [ 1488.383257] env[63379]: _type = "Task" [ 1488.383257] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.394709] env[63379]: DEBUG oslo_vmware.api [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Task: {'id': task-1779230, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.402658] env[63379]: DEBUG oslo_vmware.api [None req-03cd40a0-90f0-493c-9e31-d8dd898eb7fe tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Task: {'id': task-1779229, 'name': PowerOffVM_Task, 'duration_secs': 0.201622} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.403604] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-03cd40a0-90f0-493c-9e31-d8dd898eb7fe tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1488.403604] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-03cd40a0-90f0-493c-9e31-d8dd898eb7fe tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1488.403604] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b8fbcc4c-d808-4bbf-bd95-abc84c140a06 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.481370] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3a1b75c-1f01-4728-9725-0537b46351a4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.496709] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c253db3d-254b-4ea1-8cbb-5c91ec5ae47d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.540110] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-647ff593-7a8b-4121-aca9-b12da6b01f24 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.542868] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-03cd40a0-90f0-493c-9e31-d8dd898eb7fe tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1488.543104] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-03cd40a0-90f0-493c-9e31-d8dd898eb7fe tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1488.543408] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-03cd40a0-90f0-493c-9e31-d8dd898eb7fe tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Deleting the datastore file [datastore1] 55fb6899-0321-4bf2-bf3f-2e87dd479433 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1488.543541] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-61e8e32e-679d-4e43-8bd3-0b6464d828d1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.550437] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04469f79-ec28-4107-b399-438b4ee7b71f {{(pid=63379) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.556479] env[63379]: DEBUG oslo_vmware.api [None req-03cd40a0-90f0-493c-9e31-d8dd898eb7fe tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Waiting for the task: (returnval){ [ 1488.556479] env[63379]: value = "task-1779232" [ 1488.556479] env[63379]: _type = "Task" [ 1488.556479] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.569181] env[63379]: DEBUG nova.compute.provider_tree [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1488.578395] env[63379]: DEBUG oslo_vmware.api [None req-03cd40a0-90f0-493c-9e31-d8dd898eb7fe tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Task: {'id': task-1779232, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.625254] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquiring lock "1d2de9da-9dfe-42d2-b206-bb5139b1970b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1488.625578] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Lock "1d2de9da-9dfe-42d2-b206-bb5139b1970b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1488.711252] env[63379]: DEBUG nova.network.neutron [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Successfully updated port: 0364d0f7-f24e-4ee3-aead-cb4a79933b69 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1488.767249] env[63379]: DEBUG oslo_vmware.api [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': task-1779228, 'name': PowerOnVM_Task, 'duration_secs': 0.645502} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.767424] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1488.767645] env[63379]: INFO nova.compute.manager [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Took 10.71 seconds to spawn the instance on the hypervisor. [ 1488.767912] env[63379]: DEBUG nova.compute.manager [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1488.768807] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcc2dbb5-2ece-4896-aa26-16323934a703 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.896394] env[63379]: DEBUG oslo_vmware.api [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Task: {'id': task-1779230, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.928781] env[63379]: DEBUG nova.compute.manager [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1488.968773] env[63379]: DEBUG nova.virt.hardware [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1488.969289] env[63379]: DEBUG nova.virt.hardware [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1488.969598] env[63379]: DEBUG nova.virt.hardware [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1488.970081] env[63379]: DEBUG nova.virt.hardware [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1488.970429] env[63379]: DEBUG nova.virt.hardware [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1488.970705] env[63379]: DEBUG nova.virt.hardware [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1488.971375] env[63379]: DEBUG nova.virt.hardware [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1488.972102] env[63379]: DEBUG nova.virt.hardware [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1488.972102] env[63379]: DEBUG 
nova.virt.hardware [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1488.972102] env[63379]: DEBUG nova.virt.hardware [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1488.972382] env[63379]: DEBUG nova.virt.hardware [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1488.974403] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00f1917e-65db-4d7a-b38f-004ba2ba0b9e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.989247] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eed38157-7e17-4384-8b0f-64c0743e8959 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.054150] env[63379]: DEBUG nova.compute.manager [req-f8ac8227-04be-4ebc-9f04-9622d7f38e28 req-a45f3df2-c58c-4d70-80e0-50258f1dd416 service nova] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Received event network-vif-deleted-58a83e9a-4269-4e0e-8eb8-2d5b517e605f {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1489.054150] env[63379]: DEBUG nova.compute.manager [req-f8ac8227-04be-4ebc-9f04-9622d7f38e28 req-a45f3df2-c58c-4d70-80e0-50258f1dd416 service nova] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Received event network-vif-plugged-0364d0f7-f24e-4ee3-aead-cb4a79933b69 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1489.054150] env[63379]: DEBUG oslo_concurrency.lockutils [req-f8ac8227-04be-4ebc-9f04-9622d7f38e28 req-a45f3df2-c58c-4d70-80e0-50258f1dd416 service nova] Acquiring lock "07cc8cd7-8368-41dd-ae13-01c8275cac9e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1489.054297] env[63379]: DEBUG oslo_concurrency.lockutils [req-f8ac8227-04be-4ebc-9f04-9622d7f38e28 req-a45f3df2-c58c-4d70-80e0-50258f1dd416 service nova] Lock "07cc8cd7-8368-41dd-ae13-01c8275cac9e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1489.054466] env[63379]: DEBUG oslo_concurrency.lockutils [req-f8ac8227-04be-4ebc-9f04-9622d7f38e28 req-a45f3df2-c58c-4d70-80e0-50258f1dd416 service nova] Lock "07cc8cd7-8368-41dd-ae13-01c8275cac9e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1489.054632] env[63379]: DEBUG nova.compute.manager 
[req-f8ac8227-04be-4ebc-9f04-9622d7f38e28 req-a45f3df2-c58c-4d70-80e0-50258f1dd416 service nova] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] No waiting events found dispatching network-vif-plugged-0364d0f7-f24e-4ee3-aead-cb4a79933b69 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1489.054872] env[63379]: WARNING nova.compute.manager [req-f8ac8227-04be-4ebc-9f04-9622d7f38e28 req-a45f3df2-c58c-4d70-80e0-50258f1dd416 service nova] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Received unexpected event network-vif-plugged-0364d0f7-f24e-4ee3-aead-cb4a79933b69 for instance with vm_state building and task_state spawning. [ 1489.055278] env[63379]: DEBUG nova.compute.manager [req-f8ac8227-04be-4ebc-9f04-9622d7f38e28 req-a45f3df2-c58c-4d70-80e0-50258f1dd416 service nova] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Received event network-changed-0364d0f7-f24e-4ee3-aead-cb4a79933b69 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1489.055479] env[63379]: DEBUG nova.compute.manager [req-f8ac8227-04be-4ebc-9f04-9622d7f38e28 req-a45f3df2-c58c-4d70-80e0-50258f1dd416 service nova] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Refreshing instance network info cache due to event network-changed-0364d0f7-f24e-4ee3-aead-cb4a79933b69. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1489.055674] env[63379]: DEBUG oslo_concurrency.lockutils [req-f8ac8227-04be-4ebc-9f04-9622d7f38e28 req-a45f3df2-c58c-4d70-80e0-50258f1dd416 service nova] Acquiring lock "refresh_cache-07cc8cd7-8368-41dd-ae13-01c8275cac9e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1489.055813] env[63379]: DEBUG oslo_concurrency.lockutils [req-f8ac8227-04be-4ebc-9f04-9622d7f38e28 req-a45f3df2-c58c-4d70-80e0-50258f1dd416 service nova] Acquired lock "refresh_cache-07cc8cd7-8368-41dd-ae13-01c8275cac9e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1489.055968] env[63379]: DEBUG nova.network.neutron [req-f8ac8227-04be-4ebc-9f04-9622d7f38e28 req-a45f3df2-c58c-4d70-80e0-50258f1dd416 service nova] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Refreshing network info cache for port 0364d0f7-f24e-4ee3-aead-cb4a79933b69 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1489.070699] env[63379]: DEBUG oslo_vmware.api [None req-03cd40a0-90f0-493c-9e31-d8dd898eb7fe tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Task: {'id': task-1779232, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.479892} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1489.071332] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-03cd40a0-90f0-493c-9e31-d8dd898eb7fe tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1489.071332] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-03cd40a0-90f0-493c-9e31-d8dd898eb7fe tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1489.071448] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-03cd40a0-90f0-493c-9e31-d8dd898eb7fe tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1489.071563] env[63379]: INFO nova.compute.manager [None req-03cd40a0-90f0-493c-9e31-d8dd898eb7fe tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Took 1.21 seconds to destroy the instance on the hypervisor. [ 1489.071802] env[63379]: DEBUG oslo.service.loopingcall [None req-03cd40a0-90f0-493c-9e31-d8dd898eb7fe tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1489.072830] env[63379]: DEBUG nova.scheduler.client.report [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1489.077033] env[63379]: DEBUG nova.compute.manager [-] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1489.077033] env[63379]: DEBUG nova.network.neutron [-] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1489.217555] env[63379]: DEBUG nova.network.neutron [req-1b80ef0f-980a-45bb-bc40-6bfcd02e62e5 req-ce763775-f826-48da-a36d-8b8bf11338a6 service nova] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Updated VIF entry in instance network info cache for port ef820562-0de4-462d-a51d-13e4a4929719. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1489.217555] env[63379]: DEBUG nova.network.neutron [req-1b80ef0f-980a-45bb-bc40-6bfcd02e62e5 req-ce763775-f826-48da-a36d-8b8bf11338a6 service nova] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Updating instance_info_cache with network_info: [{"id": "ef820562-0de4-462d-a51d-13e4a4929719", "address": "fa:16:3e:eb:5b:7f", "network": {"id": "c67e6fb1-ba3e-4494-b459-ecd555f3bf64", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1864563188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.212", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c01c5c8c3734c4ea066324e542e7374", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6934071-bf85-4591-9c7d-55c7ea131262", "external-id": "nsx-vlan-transportzone-452", "segmentation_id": 452, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef820562-0d", "ovs_interfaceid": "ef820562-0de4-462d-a51d-13e4a4929719", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1489.217555] env[63379]: DEBUG oslo_concurrency.lockutils [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Acquiring lock "refresh_cache-07cc8cd7-8368-41dd-ae13-01c8275cac9e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1489.287193] env[63379]: INFO nova.compute.manager [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Took 58.33 seconds to build instance. [ 1489.394955] env[63379]: DEBUG oslo_vmware.api [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Task: {'id': task-1779230, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.545689} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1489.395234] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] bc7baa1a-f65d-41d4-ad86-de041fbb2306/bc7baa1a-f65d-41d4-ad86-de041fbb2306.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1489.395449] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1489.395714] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f3ab2bca-848d-47e3-a0f6-1b00b637476f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.402755] env[63379]: DEBUG oslo_vmware.api [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Waiting for the task: (returnval){ [ 1489.402755] env[63379]: value = "task-1779233" [ 1489.402755] env[63379]: _type = "Task" [ 1489.402755] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1489.411469] env[63379]: DEBUG oslo_vmware.api [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Task: {'id': task-1779233, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.579696] env[63379]: DEBUG oslo_concurrency.lockutils [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.680s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1489.580448] env[63379]: DEBUG nova.compute.manager [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1489.583128] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.954s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1489.584802] env[63379]: INFO nova.compute.claims [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1489.624517] env[63379]: DEBUG nova.network.neutron [req-f8ac8227-04be-4ebc-9f04-9622d7f38e28 req-a45f3df2-c58c-4d70-80e0-50258f1dd416 service nova] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1489.719324] env[63379]: DEBUG oslo_concurrency.lockutils [req-1b80ef0f-980a-45bb-bc40-6bfcd02e62e5 req-ce763775-f826-48da-a36d-8b8bf11338a6 service nova] Releasing lock "refresh_cache-90f0c97d-695b-4975-8ab9-4e77a9175da1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1489.719579] env[63379]: DEBUG nova.compute.manager [req-1b80ef0f-980a-45bb-bc40-6bfcd02e62e5 req-ce763775-f826-48da-a36d-8b8bf11338a6 service nova] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Received event network-vif-plugged-85bd2ccd-417b-4f6c-9e65-c41d8adb52d2 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1489.720250] env[63379]: DEBUG oslo_concurrency.lockutils [req-1b80ef0f-980a-45bb-bc40-6bfcd02e62e5 req-ce763775-f826-48da-a36d-8b8bf11338a6 service nova] Acquiring lock "07cc8cd7-8368-41dd-ae13-01c8275cac9e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1489.720250] env[63379]: DEBUG oslo_concurrency.lockutils [req-1b80ef0f-980a-45bb-bc40-6bfcd02e62e5 req-ce763775-f826-48da-a36d-8b8bf11338a6 service nova] Lock "07cc8cd7-8368-41dd-ae13-01c8275cac9e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1489.720693] env[63379]: DEBUG oslo_concurrency.lockutils [req-1b80ef0f-980a-45bb-bc40-6bfcd02e62e5 req-ce763775-f826-48da-a36d-8b8bf11338a6 service nova] Lock "07cc8cd7-8368-41dd-ae13-01c8275cac9e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1489.720693] env[63379]: DEBUG nova.compute.manager [req-1b80ef0f-980a-45bb-bc40-6bfcd02e62e5 req-ce763775-f826-48da-a36d-8b8bf11338a6 service nova] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] No waiting events found dispatching network-vif-plugged-85bd2ccd-417b-4f6c-9e65-c41d8adb52d2 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1489.721400] env[63379]: WARNING nova.compute.manager [req-1b80ef0f-980a-45bb-bc40-6bfcd02e62e5 
req-ce763775-f826-48da-a36d-8b8bf11338a6 service nova] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Received unexpected event network-vif-plugged-85bd2ccd-417b-4f6c-9e65-c41d8adb52d2 for instance with vm_state building and task_state spawning. [ 1489.721400] env[63379]: DEBUG nova.compute.manager [req-1b80ef0f-980a-45bb-bc40-6bfcd02e62e5 req-ce763775-f826-48da-a36d-8b8bf11338a6 service nova] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Received event network-changed-85bd2ccd-417b-4f6c-9e65-c41d8adb52d2 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1489.721400] env[63379]: DEBUG nova.compute.manager [req-1b80ef0f-980a-45bb-bc40-6bfcd02e62e5 req-ce763775-f826-48da-a36d-8b8bf11338a6 service nova] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Refreshing instance network info cache due to event network-changed-85bd2ccd-417b-4f6c-9e65-c41d8adb52d2. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1489.721556] env[63379]: DEBUG oslo_concurrency.lockutils [req-1b80ef0f-980a-45bb-bc40-6bfcd02e62e5 req-ce763775-f826-48da-a36d-8b8bf11338a6 service nova] Acquiring lock "refresh_cache-07cc8cd7-8368-41dd-ae13-01c8275cac9e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1489.730752] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquiring lock "3b662a31-76b9-4ac8-a6bd-bc4983f7fec9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1489.731087] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Lock "3b662a31-76b9-4ac8-a6bd-bc4983f7fec9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1489.787383] env[63379]: DEBUG nova.network.neutron [req-f8ac8227-04be-4ebc-9f04-9622d7f38e28 req-a45f3df2-c58c-4d70-80e0-50258f1dd416 service nova] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1489.794803] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c6a6ebe7-3b82-4244-b8b5-99342198a27f tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Lock "04234ba7-24a3-48e5-9f62-6f4dddd0054a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 66.285s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1489.922969] env[63379]: DEBUG nova.network.neutron [-] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1489.927948] env[63379]: DEBUG oslo_vmware.api [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 
tempest-ServerGroupTestJSON-1784230594-project-member] Task: {'id': task-1779233, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.259242} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1489.927948] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1489.927948] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7819ace0-68a0-4f99-9ec3-46d80185c253 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.969221] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Reconfiguring VM instance instance-00000020 to attach disk [datastore1] bc7baa1a-f65d-41d4-ad86-de041fbb2306/bc7baa1a-f65d-41d4-ad86-de041fbb2306.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1489.970165] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cc01d4d4-86a4-438a-99f0-04b42a43281b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.004188] env[63379]: DEBUG oslo_vmware.api [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Waiting for the task: (returnval){ [ 1490.004188] env[63379]: value = "task-1779234" [ 1490.004188] env[63379]: _type = "Task" [ 1490.004188] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1490.018214] env[63379]: DEBUG oslo_vmware.api [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Task: {'id': task-1779234, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.024457] env[63379]: DEBUG nova.network.neutron [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Successfully updated port: 5c6da110-b3ed-4065-94b0-004b98fd1363 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1490.090881] env[63379]: DEBUG nova.compute.utils [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1490.100025] env[63379]: DEBUG nova.compute.manager [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1490.100025] env[63379]: DEBUG nova.network.neutron [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1490.185395] env[63379]: DEBUG nova.policy [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '43f89be3665844e28cb9e4675f712d64', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c9a0f0e3c07545b889ed575e21b131dc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1490.233324] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Acquiring lock "f082cdd7-228e-4100-b301-5af6daea9b36" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1490.233666] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Lock "f082cdd7-228e-4100-b301-5af6daea9b36" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1490.290500] env[63379]: DEBUG oslo_concurrency.lockutils [req-f8ac8227-04be-4ebc-9f04-9622d7f38e28 req-a45f3df2-c58c-4d70-80e0-50258f1dd416 service nova] Releasing lock "refresh_cache-07cc8cd7-8368-41dd-ae13-01c8275cac9e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1490.290764] env[63379]: DEBUG oslo_concurrency.lockutils [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Acquired lock "refresh_cache-07cc8cd7-8368-41dd-ae13-01c8275cac9e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1490.290896] env[63379]: DEBUG nova.network.neutron [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1490.300330] env[63379]: DEBUG nova.compute.manager [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Starting instance... 
{{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1490.381827] env[63379]: DEBUG nova.compute.manager [req-3b0584c6-5229-4ad9-b278-a4e6c915c0c4 req-41a89724-8367-4c3e-915f-03eb9c792c5f service nova] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Received event network-changed-1c6c710f-163e-4747-8489-53e8fdf2cf1f {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1490.382369] env[63379]: DEBUG nova.compute.manager [req-3b0584c6-5229-4ad9-b278-a4e6c915c0c4 req-41a89724-8367-4c3e-915f-03eb9c792c5f service nova] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Refreshing instance network info cache due to event network-changed-1c6c710f-163e-4747-8489-53e8fdf2cf1f. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1490.382672] env[63379]: DEBUG oslo_concurrency.lockutils [req-3b0584c6-5229-4ad9-b278-a4e6c915c0c4 req-41a89724-8367-4c3e-915f-03eb9c792c5f service nova] Acquiring lock "refresh_cache-04234ba7-24a3-48e5-9f62-6f4dddd0054a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1490.383212] env[63379]: DEBUG oslo_concurrency.lockutils [req-3b0584c6-5229-4ad9-b278-a4e6c915c0c4 req-41a89724-8367-4c3e-915f-03eb9c792c5f service nova] Acquired lock "refresh_cache-04234ba7-24a3-48e5-9f62-6f4dddd0054a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1490.383608] env[63379]: DEBUG nova.network.neutron [req-3b0584c6-5229-4ad9-b278-a4e6c915c0c4 req-41a89724-8367-4c3e-915f-03eb9c792c5f service nova] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Refreshing network info cache for port 1c6c710f-163e-4747-8489-53e8fdf2cf1f {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1490.426425] env[63379]: INFO nova.compute.manager [-] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Took 1.35 seconds to deallocate network for instance. [ 1490.515982] env[63379]: DEBUG oslo_vmware.api [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Task: {'id': task-1779234, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.528603] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Acquiring lock "refresh_cache-a78feafb-00bc-44c4-acd3-a36fb8a81767" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1490.528710] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Acquired lock "refresh_cache-a78feafb-00bc-44c4-acd3-a36fb8a81767" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1490.528853] env[63379]: DEBUG nova.network.neutron [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1490.597651] env[63379]: DEBUG nova.compute.manager [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1490.672911] env[63379]: DEBUG nova.network.neutron [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Successfully created port: 4b39f7fe-6ef6-4804-b4b1-102adc940d55 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1490.823765] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1490.865749] env[63379]: DEBUG nova.network.neutron [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1490.940277] env[63379]: DEBUG oslo_concurrency.lockutils [None req-03cd40a0-90f0-493c-9e31-d8dd898eb7fe tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1491.018993] env[63379]: DEBUG oslo_vmware.api [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Task: {'id': task-1779234, 'name': ReconfigVM_Task, 'duration_secs': 0.825885} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1491.019323] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Reconfigured VM instance instance-00000020 to attach disk [datastore1] bc7baa1a-f65d-41d4-ad86-de041fbb2306/bc7baa1a-f65d-41d4-ad86-de041fbb2306.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1491.020338] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e3173cc9-39d1-4e48-80ed-572fa4f40f62 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.029371] env[63379]: DEBUG oslo_vmware.api [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Waiting for the task: (returnval){ [ 1491.029371] env[63379]: value = "task-1779235" [ 1491.029371] env[63379]: _type = "Task" [ 1491.029371] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.043227] env[63379]: DEBUG oslo_vmware.api [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Task: {'id': task-1779235, 'name': Rename_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.086757] env[63379]: DEBUG nova.network.neutron [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Instance cache missing network info. 
{{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1491.349415] env[63379]: DEBUG nova.network.neutron [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Updating instance_info_cache with network_info: [{"id": "5c6da110-b3ed-4065-94b0-004b98fd1363", "address": "fa:16:3e:63:c5:8b", "network": {"id": "db8165e6-d149-447f-8124-dd0f145b95ee", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1894207473-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a35cb189afd489591fe9d6e85640d4a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35fcdc55-dc29-451b-ad56-3a03b044dc81", "external-id": "nsx-vlan-transportzone-552", "segmentation_id": 552, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c6da110-b3", "ovs_interfaceid": "5c6da110-b3ed-4065-94b0-004b98fd1363", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1491.405458] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87bd677c-9622-4b08-95a7-d5803045b7d9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.415315] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8544e963-91bc-44ad-b772-3c5a74b7c828 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.450504] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a0fb97f-5043-4eb7-b37a-755bc463f502 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.458705] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8a70ee3-649d-4819-a902-ce193beff64f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.474138] env[63379]: DEBUG nova.compute.provider_tree [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1491.542478] env[63379]: DEBUG oslo_vmware.api [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Task: {'id': task-1779235, 'name': Rename_Task, 'duration_secs': 0.346411} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1491.543029] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1491.543412] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b8ddb9a6-c153-4f17-8686-2bf3b4dd59c6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.550509] env[63379]: DEBUG oslo_vmware.api [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Waiting for the task: (returnval){ [ 1491.550509] env[63379]: value = "task-1779236" [ 1491.550509] env[63379]: _type = "Task" [ 1491.550509] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.559100] env[63379]: DEBUG oslo_vmware.api [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Task: {'id': task-1779236, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.612297] env[63379]: DEBUG nova.compute.manager [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1491.648193] env[63379]: DEBUG nova.compute.manager [req-37c65f0a-a7b1-4a3c-96a2-f53588a9cbb8 req-7306c0a4-5e4f-4305-95e1-930f587fcc8e service nova] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Received event network-vif-deleted-a3f7ad68-1a71-4217-91b5-0d8a762a15c5 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1491.648372] env[63379]: DEBUG nova.compute.manager [req-37c65f0a-a7b1-4a3c-96a2-f53588a9cbb8 req-7306c0a4-5e4f-4305-95e1-930f587fcc8e service nova] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Received event network-vif-plugged-5c6da110-b3ed-4065-94b0-004b98fd1363 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1491.648573] env[63379]: DEBUG oslo_concurrency.lockutils [req-37c65f0a-a7b1-4a3c-96a2-f53588a9cbb8 req-7306c0a4-5e4f-4305-95e1-930f587fcc8e service nova] Acquiring lock "a78feafb-00bc-44c4-acd3-a36fb8a81767-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1491.648865] env[63379]: DEBUG oslo_concurrency.lockutils [req-37c65f0a-a7b1-4a3c-96a2-f53588a9cbb8 req-7306c0a4-5e4f-4305-95e1-930f587fcc8e service nova] Lock "a78feafb-00bc-44c4-acd3-a36fb8a81767-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1491.649019] env[63379]: DEBUG oslo_concurrency.lockutils [req-37c65f0a-a7b1-4a3c-96a2-f53588a9cbb8 req-7306c0a4-5e4f-4305-95e1-930f587fcc8e service nova] Lock "a78feafb-00bc-44c4-acd3-a36fb8a81767-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1491.649201] env[63379]: DEBUG nova.compute.manager [req-37c65f0a-a7b1-4a3c-96a2-f53588a9cbb8 req-7306c0a4-5e4f-4305-95e1-930f587fcc8e service nova] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] No waiting events found dispatching network-vif-plugged-5c6da110-b3ed-4065-94b0-004b98fd1363 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1491.649419] env[63379]: WARNING nova.compute.manager [req-37c65f0a-a7b1-4a3c-96a2-f53588a9cbb8 req-7306c0a4-5e4f-4305-95e1-930f587fcc8e service nova] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Received unexpected event network-vif-plugged-5c6da110-b3ed-4065-94b0-004b98fd1363 for instance with vm_state building and task_state spawning. [ 1491.649666] env[63379]: DEBUG nova.compute.manager [req-37c65f0a-a7b1-4a3c-96a2-f53588a9cbb8 req-7306c0a4-5e4f-4305-95e1-930f587fcc8e service nova] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Received event network-changed-5c6da110-b3ed-4065-94b0-004b98fd1363 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1491.649759] env[63379]: DEBUG nova.compute.manager [req-37c65f0a-a7b1-4a3c-96a2-f53588a9cbb8 req-7306c0a4-5e4f-4305-95e1-930f587fcc8e service nova] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Refreshing instance network info cache due to event network-changed-5c6da110-b3ed-4065-94b0-004b98fd1363. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1491.649903] env[63379]: DEBUG oslo_concurrency.lockutils [req-37c65f0a-a7b1-4a3c-96a2-f53588a9cbb8 req-7306c0a4-5e4f-4305-95e1-930f587fcc8e service nova] Acquiring lock "refresh_cache-a78feafb-00bc-44c4-acd3-a36fb8a81767" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1491.655361] env[63379]: DEBUG nova.virt.hardware [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1491.655650] env[63379]: DEBUG nova.virt.hardware [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1491.655832] env[63379]: DEBUG nova.virt.hardware [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1491.656402] env[63379]: DEBUG nova.virt.hardware [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1491.656402] env[63379]: DEBUG nova.virt.hardware [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1491.656402] env[63379]: DEBUG nova.virt.hardware [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1491.656553] env[63379]: DEBUG nova.virt.hardware [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 
1491.656715] env[63379]: DEBUG nova.virt.hardware [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1491.656946] env[63379]: DEBUG nova.virt.hardware [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1491.657073] env[63379]: DEBUG nova.virt.hardware [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1491.657256] env[63379]: DEBUG nova.virt.hardware [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1491.658858] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-361c6a93-01ae-4693-bcef-79fe6c2a9c8c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.668974] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86ae685f-6613-4107-9914-999f0cdb3b1a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.710789] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Acquiring lock "1d76a28f-822d-4b4f-be2f-2ad3371b3979" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1491.711799] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lock "1d76a28f-822d-4b4f-be2f-2ad3371b3979" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1491.756263] env[63379]: DEBUG nova.network.neutron [req-3b0584c6-5229-4ad9-b278-a4e6c915c0c4 req-41a89724-8367-4c3e-915f-03eb9c792c5f service nova] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Updated VIF entry in instance network info cache for port 1c6c710f-163e-4747-8489-53e8fdf2cf1f. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1491.756663] env[63379]: DEBUG nova.network.neutron [req-3b0584c6-5229-4ad9-b278-a4e6c915c0c4 req-41a89724-8367-4c3e-915f-03eb9c792c5f service nova] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Updating instance_info_cache with network_info: [{"id": "1c6c710f-163e-4747-8489-53e8fdf2cf1f", "address": "fa:16:3e:04:6f:f9", "network": {"id": "8f3138b9-b170-40da-aa17-d0938c48221d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2072680575-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "696eed8e898e4ffd831805df17a93d27", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c6c710f-16", "ovs_interfaceid": "1c6c710f-163e-4747-8489-53e8fdf2cf1f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1491.819403] env[63379]: DEBUG nova.network.neutron [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Updating instance_info_cache with network_info: [{"id": "85bd2ccd-417b-4f6c-9e65-c41d8adb52d2", "address": "fa:16:3e:d5:75:9e", "network": {"id": "1dfe8453-5b1c-42db-88c5-bdb3a1920c3d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-511080467", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.68", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2519cafe6c84b12b560995b2d3dd84d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5a24f297-626b-4461-8c8d-1140fe436836", "external-id": "nsx-vlan-transportzone-604", "segmentation_id": 604, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85bd2ccd-41", "ovs_interfaceid": "85bd2ccd-417b-4f6c-9e65-c41d8adb52d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "0364d0f7-f24e-4ee3-aead-cb4a79933b69", "address": "fa:16:3e:44:34:07", "network": {"id": "bf8961ff-eddc-4cf1-9003-ce3123ac3074", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1867067329", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.57", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, 
"meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a2519cafe6c84b12b560995b2d3dd84d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "93341b73-918c-4e9d-9c66-ca171a54b574", "external-id": "nsx-vlan-transportzone-663", "segmentation_id": 663, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0364d0f7-f2", "ovs_interfaceid": "0364d0f7-f24e-4ee3-aead-cb4a79933b69", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1491.859129] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Releasing lock "refresh_cache-a78feafb-00bc-44c4-acd3-a36fb8a81767" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1491.859129] env[63379]: DEBUG nova.compute.manager [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Instance network_info: |[{"id": "5c6da110-b3ed-4065-94b0-004b98fd1363", "address": "fa:16:3e:63:c5:8b", "network": {"id": "db8165e6-d149-447f-8124-dd0f145b95ee", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1894207473-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a35cb189afd489591fe9d6e85640d4a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35fcdc55-dc29-451b-ad56-3a03b044dc81", "external-id": "nsx-vlan-transportzone-552", "segmentation_id": 552, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c6da110-b3", "ovs_interfaceid": "5c6da110-b3ed-4065-94b0-004b98fd1363", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1491.859129] env[63379]: DEBUG oslo_concurrency.lockutils [req-37c65f0a-a7b1-4a3c-96a2-f53588a9cbb8 req-7306c0a4-5e4f-4305-95e1-930f587fcc8e service nova] Acquired lock "refresh_cache-a78feafb-00bc-44c4-acd3-a36fb8a81767" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1491.859359] env[63379]: DEBUG nova.network.neutron [req-37c65f0a-a7b1-4a3c-96a2-f53588a9cbb8 req-7306c0a4-5e4f-4305-95e1-930f587fcc8e service nova] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Refreshing network info cache for port 5c6da110-b3ed-4065-94b0-004b98fd1363 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1491.861059] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 
tempest-InstanceActionsTestJSON-1725331191-project-member] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:63:c5:8b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '35fcdc55-dc29-451b-ad56-3a03b044dc81', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5c6da110-b3ed-4065-94b0-004b98fd1363', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1491.868545] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Creating folder: Project (5a35cb189afd489591fe9d6e85640d4a). Parent ref: group-v369214. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1491.869103] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-339cc588-ba45-47a0-b9db-1946ff823cf8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.881883] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Created folder: Project (5a35cb189afd489591fe9d6e85640d4a) in parent group-v369214. [ 1491.882101] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Creating folder: Instances. Parent ref: group-v369318. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1491.882350] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-80769aa0-3d31-4aa9-a535-82d862a9e585 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.893021] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Created folder: Instances in parent group-v369318. [ 1491.893210] env[63379]: DEBUG oslo.service.loopingcall [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1491.893413] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1491.894031] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1be916f7-4d88-4fcd-bfb9-bfcfb620747e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.914267] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1491.914267] env[63379]: value = "task-1779239" [ 1491.914267] env[63379]: _type = "Task" [ 1491.914267] env[63379]: } to complete. 
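Each "Invoking Folder.CreateVM_Task" / "Waiting for the task" pair above follows the standard oslo.vmware pattern: invoke a vSphere *_Task method through the session, receive a Task managed-object reference, and poll it until it completes. A minimal sketch of that pattern, with the session credentials and the folder/config-spec/resource-pool references assumed to exist rather than taken from this log:

    from oslo_vmware import api

    def create_vm(session, folder_ref, config_spec, resource_pool_ref):
        """Sketch of the invoke-then-wait pattern behind the CreateVM_Task records."""
        # invoke_api() issues the SOAP call; *_Task methods return a Task moref
        # rather than a result.
        task_ref = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                                      config=config_spec, pool=resource_pool_ref)
        # wait_for_task() polls the task (the "progress is N%" lines) and raises
        # if vCenter reports an error; its result is the new VirtualMachine moref.
        return session.wait_for_task(task_ref).result

    # A session like the one the driver opened at startup (placeholder values;
    # constructing it opens a real connection, so it is left commented out here).
    # session = api.VMwareAPISession('vcenter.example.org', 'user', 'secret',
    #                                api_retry_count=10, task_poll_interval=0.5)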
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.921918] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779239, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.976722] env[63379]: DEBUG nova.scheduler.client.report [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1492.061098] env[63379]: DEBUG oslo_vmware.api [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Task: {'id': task-1779236, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.260188] env[63379]: DEBUG oslo_concurrency.lockutils [req-3b0584c6-5229-4ad9-b278-a4e6c915c0c4 req-41a89724-8367-4c3e-915f-03eb9c792c5f service nova] Releasing lock "refresh_cache-04234ba7-24a3-48e5-9f62-6f4dddd0054a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1492.322297] env[63379]: DEBUG oslo_concurrency.lockutils [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Releasing lock "refresh_cache-07cc8cd7-8368-41dd-ae13-01c8275cac9e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1492.322732] env[63379]: DEBUG nova.compute.manager [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Instance network_info: |[{"id": "85bd2ccd-417b-4f6c-9e65-c41d8adb52d2", "address": "fa:16:3e:d5:75:9e", "network": {"id": "1dfe8453-5b1c-42db-88c5-bdb3a1920c3d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-511080467", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.68", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2519cafe6c84b12b560995b2d3dd84d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5a24f297-626b-4461-8c8d-1140fe436836", "external-id": "nsx-vlan-transportzone-604", "segmentation_id": 604, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85bd2ccd-41", "ovs_interfaceid": "85bd2ccd-417b-4f6c-9e65-c41d8adb52d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}, {"id": "0364d0f7-f24e-4ee3-aead-cb4a79933b69", "address": "fa:16:3e:44:34:07", "network": {"id": "bf8961ff-eddc-4cf1-9003-ce3123ac3074", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1867067329", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.57", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a2519cafe6c84b12b560995b2d3dd84d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "93341b73-918c-4e9d-9c66-ca171a54b574", "external-id": "nsx-vlan-transportzone-663", "segmentation_id": 663, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0364d0f7-f2", "ovs_interfaceid": "0364d0f7-f24e-4ee3-aead-cb4a79933b69", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1492.323108] env[63379]: DEBUG oslo_concurrency.lockutils [req-1b80ef0f-980a-45bb-bc40-6bfcd02e62e5 req-ce763775-f826-48da-a36d-8b8bf11338a6 service nova] Acquired lock "refresh_cache-07cc8cd7-8368-41dd-ae13-01c8275cac9e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1492.323330] env[63379]: DEBUG nova.network.neutron [req-1b80ef0f-980a-45bb-bc40-6bfcd02e62e5 req-ce763775-f826-48da-a36d-8b8bf11338a6 service nova] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Refreshing network info cache for port 85bd2ccd-417b-4f6c-9e65-c41d8adb52d2 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1492.324661] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d5:75:9e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5a24f297-626b-4461-8c8d-1140fe436836', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '85bd2ccd-417b-4f6c-9e65-c41d8adb52d2', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:44:34:07', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '93341b73-918c-4e9d-9c66-ca171a54b574', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0364d0f7-f24e-4ee3-aead-cb4a79933b69', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1492.334342] env[63379]: DEBUG oslo.service.loopingcall [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1492.335244] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1492.335477] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ab0b4f73-9ea0-456e-8f7e-862e2d9d67d2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.357975] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1492.357975] env[63379]: value = "task-1779240" [ 1492.357975] env[63379]: _type = "Task" [ 1492.357975] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1492.368629] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779240, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.426455] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779239, 'name': CreateVM_Task, 'duration_secs': 0.382038} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.426653] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1492.427408] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1492.427791] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1492.427972] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1492.428269] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce925e2a-007e-4fef-a56b-ba5f367756d9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.434371] env[63379]: DEBUG oslo_vmware.api [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Waiting for the task: (returnval){ [ 1492.434371] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e5055e-08cb-5b25-13d3-4fa1bb25c4d6" [ 1492.434371] env[63379]: _type = "Task" [ 1492.434371] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1492.443321] env[63379]: DEBUG oslo_vmware.api [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e5055e-08cb-5b25-13d3-4fa1bb25c4d6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.456789] env[63379]: DEBUG nova.compute.manager [req-d558cf13-5264-4f4d-b236-dc89aa329eab req-21b1a202-0457-4aa6-b0cc-5bf8a819a808 service nova] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Received event network-changed-1c6c710f-163e-4747-8489-53e8fdf2cf1f {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1492.456881] env[63379]: DEBUG nova.compute.manager [req-d558cf13-5264-4f4d-b236-dc89aa329eab req-21b1a202-0457-4aa6-b0cc-5bf8a819a808 service nova] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Refreshing instance network info cache due to event network-changed-1c6c710f-163e-4747-8489-53e8fdf2cf1f. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1492.457143] env[63379]: DEBUG oslo_concurrency.lockutils [req-d558cf13-5264-4f4d-b236-dc89aa329eab req-21b1a202-0457-4aa6-b0cc-5bf8a819a808 service nova] Acquiring lock "refresh_cache-04234ba7-24a3-48e5-9f62-6f4dddd0054a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1492.457723] env[63379]: DEBUG oslo_concurrency.lockutils [req-d558cf13-5264-4f4d-b236-dc89aa329eab req-21b1a202-0457-4aa6-b0cc-5bf8a819a808 service nova] Acquired lock "refresh_cache-04234ba7-24a3-48e5-9f62-6f4dddd0054a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1492.457723] env[63379]: DEBUG nova.network.neutron [req-d558cf13-5264-4f4d-b236-dc89aa329eab req-21b1a202-0457-4aa6-b0cc-5bf8a819a808 service nova] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Refreshing network info cache for port 1c6c710f-163e-4747-8489-53e8fdf2cf1f {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1492.485025] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.902s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1492.485282] env[63379]: DEBUG nova.compute.manager [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Start building networks asynchronously for instance. 
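The compute_resources lock held around ResourceTracker.instance_claim above is where nova checks a new instance against the inventory it reported to placement a few records earlier ("Inventory has not changed for provider cf478c89-..."). The capacity placement schedules against follows the usual (total - reserved) * allocation_ratio arithmetic; a small sketch with the figures from that inventory record (the formula is the standard placement convention, not something printed in this log):

    # Effective capacity per resource class, using the inventory figures logged above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
        print(rc, capacity)
    # VCPU 192, MEMORY_MB 196078, DISK_GB 400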
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1492.487860] env[63379]: DEBUG oslo_concurrency.lockutils [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.650s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1492.489244] env[63379]: INFO nova.compute.claims [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1492.566019] env[63379]: DEBUG oslo_vmware.api [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Task: {'id': task-1779236, 'name': PowerOnVM_Task, 'duration_secs': 0.693416} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.566019] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1492.566019] env[63379]: INFO nova.compute.manager [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Took 10.76 seconds to spawn the instance on the hypervisor. [ 1492.566019] env[63379]: DEBUG nova.compute.manager [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1492.566019] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b8baad6-37d3-4fea-8632-3e34f658f3b1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.690083] env[63379]: DEBUG nova.network.neutron [req-37c65f0a-a7b1-4a3c-96a2-f53588a9cbb8 req-7306c0a4-5e4f-4305-95e1-930f587fcc8e service nova] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Updated VIF entry in instance network info cache for port 5c6da110-b3ed-4065-94b0-004b98fd1363. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1492.690083] env[63379]: DEBUG nova.network.neutron [req-37c65f0a-a7b1-4a3c-96a2-f53588a9cbb8 req-7306c0a4-5e4f-4305-95e1-930f587fcc8e service nova] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Updating instance_info_cache with network_info: [{"id": "5c6da110-b3ed-4065-94b0-004b98fd1363", "address": "fa:16:3e:63:c5:8b", "network": {"id": "db8165e6-d149-447f-8124-dd0f145b95ee", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1894207473-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a35cb189afd489591fe9d6e85640d4a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35fcdc55-dc29-451b-ad56-3a03b044dc81", "external-id": "nsx-vlan-transportzone-552", "segmentation_id": 552, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c6da110-b3", "ovs_interfaceid": "5c6da110-b3ed-4065-94b0-004b98fd1363", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1492.875212] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779240, 'name': CreateVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.946422] env[63379]: DEBUG oslo_vmware.api [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e5055e-08cb-5b25-13d3-4fa1bb25c4d6, 'name': SearchDatastore_Task, 'duration_secs': 0.011236} completed successfully. 
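The "Updating instance_info_cache with network_info" records above dump the cached VIF list verbatim, and its shape is plain JSON. A short sketch that pulls the fields most often needed (port id, MAC, fixed IPs, MTU) out of one cached entry, trimmed to the structure exactly as it appears in the a78feafb-... record:

    import json

    # One entry of the cached network_info list, in the shape logged above
    # (trimmed to the fields this sketch reads).
    cached = json.loads('''[{"id": "5c6da110-b3ed-4065-94b0-004b98fd1363",
      "address": "fa:16:3e:63:c5:8b",
      "network": {"label": "tempest-InstanceActionsTestJSON-1894207473-network",
                  "subnets": [{"cidr": "192.168.128.0/28",
                               "ips": [{"address": "192.168.128.10", "type": "fixed"}]}],
                  "meta": {"mtu": 8950}},
      "devname": "tap5c6da110-b3", "vnic_type": "normal", "active": true}]''')

    for vif in cached:
        fixed_ips = [ip["address"]
                     for subnet in vif["network"]["subnets"]
                     for ip in subnet["ips"] if ip["type"] == "fixed"]
        print(vif["id"], vif["address"], fixed_ips, vif["network"]["meta"]["mtu"])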
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.946733] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1492.946972] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1492.947416] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1492.947576] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1492.947766] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1492.948054] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bb89d229-c8eb-4622-9289-581a15fb5423 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.956817] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1492.957008] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Folder [datastore1] devstack-image-cache_base created. 
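The Acquiring/Acquired/Releasing lock records around the devstack-image-cache_base vmdk above (and the compute_resources lock earlier) come from oslo.concurrency's lockutils, which nova uses both as a decorator and as a context manager keyed on an arbitrary string. A minimal sketch of both idioms; the lock names mirror the log but the functions themselves are hypothetical:

    from oslo_concurrency import lockutils

    # Decorator form, as used around ResourceTracker.instance_claim ("compute_resources").
    synchronized = lockutils.synchronized_with_prefix('nova-')

    @synchronized('compute_resources')
    def claim_resources():
        ...

    # Context-manager form, keyed on an arbitrary string such as the image-cache
    # vmdk path; the "waited N s" / "held N s" figures are measured around this.
    def fetch_image_if_missing(datastore, image_id):
        with lockutils.lock('[%s] devstack-image-cache_base/%s' % (datastore, image_id)):
            ...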
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1492.957772] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2a56eaf-a9f8-4a58-af8e-959585fec065 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.967378] env[63379]: DEBUG oslo_vmware.api [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Waiting for the task: (returnval){ [ 1492.967378] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]521a166f-9c92-1c92-95fb-0dad9995220d" [ 1492.967378] env[63379]: _type = "Task" [ 1492.967378] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1492.978775] env[63379]: DEBUG oslo_vmware.api [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]521a166f-9c92-1c92-95fb-0dad9995220d, 'name': SearchDatastore_Task, 'duration_secs': 0.009629} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.979433] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89d7fa2f-84c5-4b7c-8b16-360c1d62a020 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.984306] env[63379]: DEBUG oslo_vmware.api [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Waiting for the task: (returnval){ [ 1492.984306] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a56b9c-cb81-a57c-ca39-89d4d734daf5" [ 1492.984306] env[63379]: _type = "Task" [ 1492.984306] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1492.995377] env[63379]: DEBUG nova.compute.utils [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1493.000618] env[63379]: DEBUG oslo_vmware.api [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a56b9c-cb81-a57c-ca39-89d4d734daf5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.001253] env[63379]: DEBUG nova.compute.manager [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1493.001511] env[63379]: DEBUG nova.network.neutron [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1493.054977] env[63379]: DEBUG nova.policy [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eb3162865ed94fcda9c38db2d10ddf18', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '53f21d581df140adb2012ea248c39a1f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1493.082437] env[63379]: INFO nova.compute.manager [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Took 58.74 seconds to build instance. [ 1493.192646] env[63379]: DEBUG oslo_concurrency.lockutils [req-37c65f0a-a7b1-4a3c-96a2-f53588a9cbb8 req-7306c0a4-5e4f-4305-95e1-930f587fcc8e service nova] Releasing lock "refresh_cache-a78feafb-00bc-44c4-acd3-a36fb8a81767" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1493.235453] env[63379]: DEBUG nova.network.neutron [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Successfully updated port: 4b39f7fe-6ef6-4804-b4b1-102adc940d55 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1493.323200] env[63379]: DEBUG nova.network.neutron [req-1b80ef0f-980a-45bb-bc40-6bfcd02e62e5 req-ce763775-f826-48da-a36d-8b8bf11338a6 service nova] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Updated VIF entry in instance network info cache for port 85bd2ccd-417b-4f6c-9e65-c41d8adb52d2. 
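The "Policy check for network:attach_external_network failed" record above is nova running the request credentials through oslo.policy before a non-admin token may attach an external network. A minimal, hypothetical sketch of such a check with oslo.policy directly; the default rule string below is illustrative, not nova's registered policy:

    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.CONF)
    enforcer.register_default(
        policy.RuleDefault('network:attach_external_network', 'role:admin'))

    # Credentials shaped like the dict in the log record above (member/reader roles).
    creds = {'roles': ['member', 'reader'],
             'project_id': '53f21d581df140adb2012ea248c39a1f'}
    target = {'project_id': creds['project_id']}

    print(enforcer.enforce('network:attach_external_network', target, creds))
    # False -> the "Policy check ... failed" outcome in the log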
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1493.323629] env[63379]: DEBUG nova.network.neutron [req-1b80ef0f-980a-45bb-bc40-6bfcd02e62e5 req-ce763775-f826-48da-a36d-8b8bf11338a6 service nova] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Updating instance_info_cache with network_info: [{"id": "85bd2ccd-417b-4f6c-9e65-c41d8adb52d2", "address": "fa:16:3e:d5:75:9e", "network": {"id": "1dfe8453-5b1c-42db-88c5-bdb3a1920c3d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-511080467", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.68", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a2519cafe6c84b12b560995b2d3dd84d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5a24f297-626b-4461-8c8d-1140fe436836", "external-id": "nsx-vlan-transportzone-604", "segmentation_id": 604, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85bd2ccd-41", "ovs_interfaceid": "85bd2ccd-417b-4f6c-9e65-c41d8adb52d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "0364d0f7-f24e-4ee3-aead-cb4a79933b69", "address": "fa:16:3e:44:34:07", "network": {"id": "bf8961ff-eddc-4cf1-9003-ce3123ac3074", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1867067329", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.57", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a2519cafe6c84b12b560995b2d3dd84d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "93341b73-918c-4e9d-9c66-ca171a54b574", "external-id": "nsx-vlan-transportzone-663", "segmentation_id": 663, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0364d0f7-f2", "ovs_interfaceid": "0364d0f7-f24e-4ee3-aead-cb4a79933b69", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1493.371381] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779240, 'name': CreateVM_Task, 'duration_secs': 0.526488} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1493.371830] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1493.372817] env[63379]: DEBUG oslo_concurrency.lockutils [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1493.373148] env[63379]: DEBUG oslo_concurrency.lockutils [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1493.373627] env[63379]: DEBUG oslo_concurrency.lockutils [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1493.374016] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e9ccbb2-8e67-4cd3-8a1b-bfeb1860d839 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.380159] env[63379]: DEBUG oslo_vmware.api [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Waiting for the task: (returnval){ [ 1493.380159] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52ed8b15-c987-ea27-9d1e-de99c380915c" [ 1493.380159] env[63379]: _type = "Task" [ 1493.380159] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.389697] env[63379]: DEBUG oslo_vmware.api [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52ed8b15-c987-ea27-9d1e-de99c380915c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.404204] env[63379]: DEBUG nova.network.neutron [req-d558cf13-5264-4f4d-b236-dc89aa329eab req-21b1a202-0457-4aa6-b0cc-5bf8a819a808 service nova] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Updated VIF entry in instance network info cache for port 1c6c710f-163e-4747-8489-53e8fdf2cf1f. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1493.405177] env[63379]: DEBUG nova.network.neutron [req-d558cf13-5264-4f4d-b236-dc89aa329eab req-21b1a202-0457-4aa6-b0cc-5bf8a819a808 service nova] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Updating instance_info_cache with network_info: [{"id": "1c6c710f-163e-4747-8489-53e8fdf2cf1f", "address": "fa:16:3e:04:6f:f9", "network": {"id": "8f3138b9-b170-40da-aa17-d0938c48221d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2072680575-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "696eed8e898e4ffd831805df17a93d27", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c6c710f-16", "ovs_interfaceid": "1c6c710f-163e-4747-8489-53e8fdf2cf1f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1493.499954] env[63379]: DEBUG oslo_vmware.api [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a56b9c-cb81-a57c-ca39-89d4d734daf5, 'name': SearchDatastore_Task, 'duration_secs': 0.008197} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1493.499954] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1493.499954] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] a78feafb-00bc-44c4-acd3-a36fb8a81767/a78feafb-00bc-44c4-acd3-a36fb8a81767.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1493.499954] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-be61ef26-bf07-4d00-90d2-25d0f7a94a3f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.505022] env[63379]: DEBUG nova.compute.manager [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1493.510145] env[63379]: DEBUG oslo_vmware.api [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Waiting for the task: (returnval){ [ 1493.510145] env[63379]: value = "task-1779241" [ 1493.510145] env[63379]: _type = "Task" [ 1493.510145] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.519973] env[63379]: DEBUG oslo_vmware.api [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Task: {'id': task-1779241, 'name': CopyVirtualDisk_Task} progress is 0%. 
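The "Copying Virtual Disk ... to ..." / Invoking VirtualDiskManager.CopyVirtualDisk_Task records above copy the cached image vmdk into the instance directory. A sketch of that call through the same oslo.vmware session, with the datastore paths and datacenter reference passed in as assumptions rather than read from this log:

    def copy_cached_image(session, source_vmdk, dest_vmdk, datacenter_ref):
        """Sketch of the VirtualDiskManager.CopyVirtualDisk_Task invocation above."""
        disk_mgr = session.vim.service_content.virtualDiskManager
        task_ref = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                                      sourceName=source_vmdk,
                                      sourceDatacenter=datacenter_ref,
                                      destName=dest_vmdk)
        session.wait_for_task(task_ref)

    # e.g. copy_cached_image(session,
    #     '[datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk',
    #     '[datastore1] <instance-uuid>/<instance-uuid>.vmdk', dc_ref)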
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.584469] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bebdc82a-fe4d-4a46-a2dc-3aab82c463e8 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Lock "bc7baa1a-f65d-41d4-ad86-de041fbb2306" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.180s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1493.607634] env[63379]: DEBUG nova.network.neutron [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Successfully created port: 9e56e6e1-9271-4b63-8a7f-5dbe0ed9cb1c {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1493.736112] env[63379]: DEBUG oslo_concurrency.lockutils [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Acquiring lock "refresh_cache-ee36cc5f-61a1-4e4f-9cae-670f5868d90c" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1493.736381] env[63379]: DEBUG oslo_concurrency.lockutils [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Acquired lock "refresh_cache-ee36cc5f-61a1-4e4f-9cae-670f5868d90c" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1493.736558] env[63379]: DEBUG nova.network.neutron [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1493.788835] env[63379]: DEBUG nova.compute.manager [req-1825caea-7fc6-4e09-bd6a-3f87b09f852c req-356041d1-26bc-4c7a-bcc3-25ede65449ef service nova] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Received event network-vif-plugged-4b39f7fe-6ef6-4804-b4b1-102adc940d55 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1493.789013] env[63379]: DEBUG oslo_concurrency.lockutils [req-1825caea-7fc6-4e09-bd6a-3f87b09f852c req-356041d1-26bc-4c7a-bcc3-25ede65449ef service nova] Acquiring lock "ee36cc5f-61a1-4e4f-9cae-670f5868d90c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1493.789177] env[63379]: DEBUG oslo_concurrency.lockutils [req-1825caea-7fc6-4e09-bd6a-3f87b09f852c req-356041d1-26bc-4c7a-bcc3-25ede65449ef service nova] Lock "ee36cc5f-61a1-4e4f-9cae-670f5868d90c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1493.789256] env[63379]: DEBUG oslo_concurrency.lockutils [req-1825caea-7fc6-4e09-bd6a-3f87b09f852c req-356041d1-26bc-4c7a-bcc3-25ede65449ef service nova] Lock "ee36cc5f-61a1-4e4f-9cae-670f5868d90c-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1493.789416] env[63379]: DEBUG nova.compute.manager [req-1825caea-7fc6-4e09-bd6a-3f87b09f852c req-356041d1-26bc-4c7a-bcc3-25ede65449ef service nova] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] No waiting events found dispatching network-vif-plugged-4b39f7fe-6ef6-4804-b4b1-102adc940d55 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1493.790250] env[63379]: WARNING nova.compute.manager [req-1825caea-7fc6-4e09-bd6a-3f87b09f852c req-356041d1-26bc-4c7a-bcc3-25ede65449ef service nova] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Received unexpected event network-vif-plugged-4b39f7fe-6ef6-4804-b4b1-102adc940d55 for instance with vm_state building and task_state spawning. [ 1493.790250] env[63379]: DEBUG nova.compute.manager [req-1825caea-7fc6-4e09-bd6a-3f87b09f852c req-356041d1-26bc-4c7a-bcc3-25ede65449ef service nova] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Received event network-changed-4b39f7fe-6ef6-4804-b4b1-102adc940d55 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1493.790250] env[63379]: DEBUG nova.compute.manager [req-1825caea-7fc6-4e09-bd6a-3f87b09f852c req-356041d1-26bc-4c7a-bcc3-25ede65449ef service nova] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Refreshing instance network info cache due to event network-changed-4b39f7fe-6ef6-4804-b4b1-102adc940d55. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1493.790250] env[63379]: DEBUG oslo_concurrency.lockutils [req-1825caea-7fc6-4e09-bd6a-3f87b09f852c req-356041d1-26bc-4c7a-bcc3-25ede65449ef service nova] Acquiring lock "refresh_cache-ee36cc5f-61a1-4e4f-9cae-670f5868d90c" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1493.827808] env[63379]: DEBUG oslo_concurrency.lockutils [req-1b80ef0f-980a-45bb-bc40-6bfcd02e62e5 req-ce763775-f826-48da-a36d-8b8bf11338a6 service nova] Releasing lock "refresh_cache-07cc8cd7-8368-41dd-ae13-01c8275cac9e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1493.894179] env[63379]: DEBUG oslo_vmware.api [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52ed8b15-c987-ea27-9d1e-de99c380915c, 'name': SearchDatastore_Task, 'duration_secs': 0.010292} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1493.897779] env[63379]: DEBUG oslo_concurrency.lockutils [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1493.898058] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1493.898303] env[63379]: DEBUG oslo_concurrency.lockutils [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1493.898456] env[63379]: DEBUG oslo_concurrency.lockutils [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1493.898639] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1493.899170] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f77502e2-f16c-41cf-8fad-9baeaf053b29 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.908709] env[63379]: DEBUG oslo_concurrency.lockutils [req-d558cf13-5264-4f4d-b236-dc89aa329eab req-21b1a202-0457-4aa6-b0cc-5bf8a819a808 service nova] Releasing lock "refresh_cache-04234ba7-24a3-48e5-9f62-6f4dddd0054a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1493.916478] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1493.916685] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Folder [datastore1] devstack-image-cache_base created. 
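The network-vif-plugged / network-changed events dispatched a few records above arrive from Neutron through nova's os-server-external-events API; the WARNING about an "unexpected event" only means it landed before the compute manager had registered a waiter for it. The request Neutron sends has roughly this shape (endpoint and token are placeholders, the field set follows the public external-events API rather than anything printed here, and the UUIDs are the instance and port from the records above):

    import requests

    NOVA_API = 'http://controller:8774/v2.1'   # placeholder endpoint
    TOKEN = '<keystone-token>'                 # placeholder token

    payload = {
        "events": [{
            "name": "network-vif-plugged",
            "server_uuid": "ee36cc5f-61a1-4e4f-9cae-670f5868d90c",
            "tag": "4b39f7fe-6ef6-4804-b4b1-102adc940d55",   # the port id
            "status": "completed",
        }]
    }

    resp = requests.post(NOVA_API + '/os-server-external-events',
                         json=payload, headers={'X-Auth-Token': TOKEN})
    print(resp.status_code)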
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1493.917544] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d612e6ed-9e17-4077-8d41-d65e49d2aff1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.926187] env[63379]: DEBUG oslo_vmware.api [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Waiting for the task: (returnval){ [ 1493.926187] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52919106-33f9-dfa6-359e-268e1cddec27" [ 1493.926187] env[63379]: _type = "Task" [ 1493.926187] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.939652] env[63379]: DEBUG oslo_vmware.api [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52919106-33f9-dfa6-359e-268e1cddec27, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.011530] env[63379]: INFO nova.virt.block_device [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Booting with volume 084d5362-d8e9-4034-9623-555ed06a1add at /dev/sda [ 1494.027679] env[63379]: DEBUG oslo_vmware.api [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Task: {'id': task-1779241, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.053864] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-13f64d8d-d2cc-4207-8d71-a3c74126752b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.065747] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4141c296-7760-4a24-92e8-ae4acaafed07 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.078385] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1e879a4-e701-4b2f-8419-71722cd1f798 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.085913] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c6055c4-8abb-4ff4-8016-2fc1b28c7d7d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.089564] env[63379]: DEBUG nova.compute.manager [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Starting instance... 
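The "Booting with volume 084d5362-... at /dev/sda" record above marks a boot-from-volume build, where the root disk is a Cinder volume attachment rather than a copied image. The API request that produces such a build uses block_device_mapping_v2 in roughly this form (a sketch of the standard request body; every value except the device name is a placeholder, not taken from this log):

    # Request body for POST /v2.1/servers that yields a "Booting with volume ...
    # at /dev/sda" build.
    server_request = {
        "server": {
            "name": "bfv-instance",
            "flavorRef": "<flavor-id>",
            "networks": [{"uuid": "<network-id>"}],
            "block_device_mapping_v2": [{
                "boot_index": 0,
                "uuid": "<cinder-volume-id>",
                "source_type": "volume",
                "destination_type": "volume",
                "device_name": "/dev/sda",
                "delete_on_termination": False,
            }],
        }
    }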
{{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1494.104204] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1e981821-30b7-4064-a0bd-b621b470e07a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.131346] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9b4a4d4-54fe-439a-aeab-1693ee01f3fe {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.139107] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1168009d-2b10-40f9-9c81-253177674e81 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.153883] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14858bf3-d9ab-482e-a8b1-c41b216d9f86 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.168385] env[63379]: DEBUG nova.compute.provider_tree [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1494.181945] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0694b6f-7679-45d4-8367-7eeb77dc7b83 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.189870] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-853557d9-7033-462e-ad09-3024203b04bf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.204365] env[63379]: DEBUG nova.virt.block_device [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Updating existing volume attachment record: 9f8d0004-99f3-42fe-9b2d-a4481cb33675 {{(pid=63379) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1494.281714] env[63379]: DEBUG nova.network.neutron [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1494.446057] env[63379]: DEBUG oslo_vmware.api [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52919106-33f9-dfa6-359e-268e1cddec27, 'name': SearchDatastore_Task, 'duration_secs': 0.060789} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.446057] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a9432ad2-23d5-4b83-957c-ed89c6aba0f4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.449098] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c3182f00-aa4c-4c66-93f9-b2195935bd42 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Acquiring lock "bc7baa1a-f65d-41d4-ad86-de041fbb2306" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1494.452303] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c3182f00-aa4c-4c66-93f9-b2195935bd42 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Lock "bc7baa1a-f65d-41d4-ad86-de041fbb2306" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.003s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1494.453774] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c3182f00-aa4c-4c66-93f9-b2195935bd42 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Acquiring lock "bc7baa1a-f65d-41d4-ad86-de041fbb2306-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1494.453774] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c3182f00-aa4c-4c66-93f9-b2195935bd42 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Lock "bc7baa1a-f65d-41d4-ad86-de041fbb2306-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1494.453774] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c3182f00-aa4c-4c66-93f9-b2195935bd42 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Lock "bc7baa1a-f65d-41d4-ad86-de041fbb2306-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1494.457756] env[63379]: INFO nova.compute.manager [None req-c3182f00-aa4c-4c66-93f9-b2195935bd42 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Terminating instance [ 1494.460307] env[63379]: DEBUG oslo_vmware.api [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Waiting for the task: (returnval){ [ 1494.460307] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e62fd8-80ad-58a7-e659-b64cb74be6a1" [ 1494.460307] env[63379]: _type = "Task" [ 1494.460307] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.460824] env[63379]: DEBUG nova.compute.manager [None req-c3182f00-aa4c-4c66-93f9-b2195935bd42 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1494.461178] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c3182f00-aa4c-4c66-93f9-b2195935bd42 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1494.462039] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a36c131-2e73-497a-9377-5387d8e3f6ac {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.471960] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3182f00-aa4c-4c66-93f9-b2195935bd42 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1494.475926] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fcdca8e4-3991-406b-90a6-ccfd862a9919 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.477217] env[63379]: DEBUG oslo_vmware.api [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e62fd8-80ad-58a7-e659-b64cb74be6a1, 'name': SearchDatastore_Task, 'duration_secs': 0.009504} completed successfully. 
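The repeated "Waiting for the task ... progress is N% ... completed successfully" records above and below all come from the same polling loop in oslo_vmware.api (wait_for_task/_poll_task). A schematic version of that loop, with a hypothetical get_task_info() accessor standing in for the real property-collector call; only the shape of the polling is taken from the log:

    import time

    def wait_for_task(get_task_info, poll_interval=0.5):
        # Poll until the task leaves the running state, mirroring the
        # "progress is N%" / "completed successfully" lines in this log.
        while True:
            info = get_task_info()
            if info["state"] == "running":
                print("progress is %s%%" % info.get("progress", 0))
            elif info["state"] == "success":
                print("completed successfully")
                return info.get("result")
            else:  # "error"
                raise RuntimeError(info.get("error", "task failed"))
            time.sleep(poll_interval)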
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.477625] env[63379]: DEBUG oslo_concurrency.lockutils [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1494.477903] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 07cc8cd7-8368-41dd-ae13-01c8275cac9e/07cc8cd7-8368-41dd-ae13-01c8275cac9e.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1494.478599] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dc7ba828-da32-4c73-acc4-fdee1df9d811 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.483235] env[63379]: DEBUG oslo_vmware.api [None req-c3182f00-aa4c-4c66-93f9-b2195935bd42 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Waiting for the task: (returnval){ [ 1494.483235] env[63379]: value = "task-1779242" [ 1494.483235] env[63379]: _type = "Task" [ 1494.483235] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.487374] env[63379]: DEBUG oslo_vmware.api [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Waiting for the task: (returnval){ [ 1494.487374] env[63379]: value = "task-1779243" [ 1494.487374] env[63379]: _type = "Task" [ 1494.487374] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.493451] env[63379]: DEBUG oslo_vmware.api [None req-c3182f00-aa4c-4c66-93f9-b2195935bd42 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Task: {'id': task-1779242, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.498495] env[63379]: DEBUG oslo_vmware.api [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Task: {'id': task-1779243, 'name': CopyVirtualDisk_Task} progress is 0%. 
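The CopyVirtualDisk_Task above copies the cached image VMDK into a per-instance folder named after the instance UUID. Only the "[datastore] folder/file.vmdk" layout is taken from the log paths; the helper names below are illustrative:

    def cached_image_path(datastore, image_id):
        return "[%s] devstack-image-cache_base/%s/%s.vmdk" % (
            datastore, image_id, image_id)

    def instance_root_path(datastore, instance_uuid):
        return "[%s] %s/%s.vmdk" % (datastore, instance_uuid, instance_uuid)

    # Reproduces the source and destination of the copy recorded above.
    src = cached_image_path("datastore1", "d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48")
    dst = instance_root_path("datastore1", "07cc8cd7-8368-41dd-ae13-01c8275cac9e")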
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.499607] env[63379]: DEBUG nova.network.neutron [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Updating instance_info_cache with network_info: [{"id": "4b39f7fe-6ef6-4804-b4b1-102adc940d55", "address": "fa:16:3e:f6:77:3e", "network": {"id": "55f3848c-4d4f-4c83-a3e6-bc7a6f7af3ce", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eb95d75934bc4912a35f709406a98a65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b39f7fe-6e", "ovs_interfaceid": "4b39f7fe-6ef6-4804-b4b1-102adc940d55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1494.524794] env[63379]: DEBUG oslo_vmware.api [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Task: {'id': task-1779241, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.544018} completed successfully. 
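The instance_info_cache blob recorded above is a list of VIF dictionaries. A small sketch of pulling the useful fields out of one entry; the dictionary shape is exactly what the cache line shows, the helper itself is illustrative, not Nova code:

    def summarize_vif(vif):
        fixed_ips = [ip["address"]
                     for subnet in vif["network"]["subnets"]
                     for ip in subnet["ips"]]
        return {
            "port_id": vif["id"],
            "mac": vif["address"],
            "fixed_ips": fixed_ips,
            "ovs_interface": vif.get("ovs_interfaceid"),
            "devname": vif.get("devname"),
        }

    # For the entry above this yields port 4b39f7fe-6ef6-4804-b4b1-102adc940d55,
    # MAC fa:16:3e:f6:77:3e, fixed IP 192.168.233.9 and devname tap4b39f7fe-6e.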
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.525684] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] a78feafb-00bc-44c4-acd3-a36fb8a81767/a78feafb-00bc-44c4-acd3-a36fb8a81767.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1494.525935] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1494.526221] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4ee281cf-9e3f-4a5a-b872-0edc5d449e98 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.533404] env[63379]: DEBUG oslo_vmware.api [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Waiting for the task: (returnval){ [ 1494.533404] env[63379]: value = "task-1779244" [ 1494.533404] env[63379]: _type = "Task" [ 1494.533404] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.541277] env[63379]: DEBUG oslo_vmware.api [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Task: {'id': task-1779244, 'name': ExtendVirtualDisk_Task} progress is 0%. 
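The 1048576 in the ExtendVirtualDisk_Task above appears to be the flavor's root disk expressed in KB: the m1.nano flavor shown later in this log has root_gb=1, and 1 GiB is 1024 * 1024 KB. A one-line check of that arithmetic:

    root_gb = 1
    requested_size_kb = root_gb * 1024 * 1024
    assert requested_size_kb == 1048576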
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.622020] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1494.683205] env[63379]: DEBUG nova.scheduler.client.report [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1494.997421] env[63379]: DEBUG oslo_vmware.api [None req-c3182f00-aa4c-4c66-93f9-b2195935bd42 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Task: {'id': task-1779242, 'name': PowerOffVM_Task, 'duration_secs': 0.298039} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.000229] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3182f00-aa4c-4c66-93f9-b2195935bd42 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1495.000356] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c3182f00-aa4c-4c66-93f9-b2195935bd42 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1495.000609] env[63379]: DEBUG oslo_vmware.api [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Task: {'id': task-1779243, 'name': CopyVirtualDisk_Task} progress is 89%. 
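The inventory reported above implies the usable capacity Placement will schedule against: capacity is (total - reserved) * allocation_ratio per resource class. Applying that to the figures in the log:

    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    for rc, inv in inventory.items():
        capacity = int((inv["total"] - inv["reserved"]) * inv["allocation_ratio"])
        print(rc, capacity)   # VCPU 192, MEMORY_MB 196078, DISK_GB 400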
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.000824] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d2177eee-c7e1-45a2-a030-10774793adfb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.002444] env[63379]: DEBUG oslo_concurrency.lockutils [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Releasing lock "refresh_cache-ee36cc5f-61a1-4e4f-9cae-670f5868d90c" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1495.002831] env[63379]: DEBUG nova.compute.manager [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Instance network_info: |[{"id": "4b39f7fe-6ef6-4804-b4b1-102adc940d55", "address": "fa:16:3e:f6:77:3e", "network": {"id": "55f3848c-4d4f-4c83-a3e6-bc7a6f7af3ce", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eb95d75934bc4912a35f709406a98a65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b39f7fe-6e", "ovs_interfaceid": "4b39f7fe-6ef6-4804-b4b1-102adc940d55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1495.003065] env[63379]: DEBUG oslo_concurrency.lockutils [req-1825caea-7fc6-4e09-bd6a-3f87b09f852c req-356041d1-26bc-4c7a-bcc3-25ede65449ef service nova] Acquired lock "refresh_cache-ee36cc5f-61a1-4e4f-9cae-670f5868d90c" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1495.003243] env[63379]: DEBUG nova.network.neutron [req-1825caea-7fc6-4e09-bd6a-3f87b09f852c req-356041d1-26bc-4c7a-bcc3-25ede65449ef service nova] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Refreshing network info cache for port 4b39f7fe-6ef6-4804-b4b1-102adc940d55 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1495.004513] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f6:77:3e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea00b53a-9c9b-4592-ab95-7e10473f338d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4b39f7fe-6ef6-4804-b4b1-102adc940d55', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1495.012185] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Creating folder: Project (c9a0f0e3c07545b889ed575e21b131dc). Parent ref: group-v369214. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1495.015723] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0c4608ae-2a1b-4fa5-bcc6-58a8e4f81038 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.028795] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Created folder: Project (c9a0f0e3c07545b889ed575e21b131dc) in parent group-v369214. [ 1495.028795] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Creating folder: Instances. Parent ref: group-v369322. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1495.028795] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4aa82ad5-90c6-4a62-b555-42fa79b5eaf0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.038348] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Created folder: Instances in parent group-v369322. [ 1495.038595] env[63379]: DEBUG oslo.service.loopingcall [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1495.038790] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1495.039014] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aa146e27-2773-4b43-be63-bd2a90c0df67 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.056351] env[63379]: DEBUG oslo_vmware.api [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Task: {'id': task-1779244, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.148284} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.058847] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1495.059739] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55474ad8-389c-474a-934c-7b2141373174 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.063456] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1495.063456] env[63379]: value = "task-1779248" [ 1495.063456] env[63379]: _type = "Task" [ 1495.063456] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.086704] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Reconfiguring VM instance instance-00000022 to attach disk [datastore1] a78feafb-00bc-44c4-acd3-a36fb8a81767/a78feafb-00bc-44c4-acd3-a36fb8a81767.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1495.087565] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4e108047-bf26-4e3b-b616-dbc0484a32a0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.106961] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779248, 'name': CreateVM_Task} progress is 10%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.109935] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c3182f00-aa4c-4c66-93f9-b2195935bd42 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1495.110143] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c3182f00-aa4c-4c66-93f9-b2195935bd42 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1495.110380] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3182f00-aa4c-4c66-93f9-b2195935bd42 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Deleting the datastore file [datastore1] bc7baa1a-f65d-41d4-ad86-de041fbb2306 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1495.110571] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7a7ee08a-4ea3-4696-bc53-12304245ac4c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.115112] env[63379]: DEBUG oslo_vmware.api [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Waiting for the task: (returnval){ [ 1495.115112] env[63379]: value = "task-1779249" [ 1495.115112] env[63379]: _type = "Task" [ 1495.115112] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.122978] env[63379]: DEBUG oslo_vmware.api [None req-c3182f00-aa4c-4c66-93f9-b2195935bd42 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Waiting for the task: (returnval){ [ 1495.122978] env[63379]: value = "task-1779250" [ 1495.122978] env[63379]: _type = "Task" [ 1495.122978] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.128339] env[63379]: DEBUG oslo_vmware.api [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Task: {'id': task-1779249, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.142139] env[63379]: DEBUG oslo_vmware.api [None req-c3182f00-aa4c-4c66-93f9-b2195935bd42 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Task: {'id': task-1779250, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.189869] env[63379]: DEBUG oslo_concurrency.lockutils [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.702s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1495.190540] env[63379]: DEBUG nova.compute.manager [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1495.193859] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.372s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1495.195739] env[63379]: INFO nova.compute.claims [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1495.248554] env[63379]: DEBUG nova.compute.manager [req-afe02dc7-180e-4d70-b3a0-3bf2e98e5ac7 req-c00c719c-39f4-4c59-a4eb-59da55a45a43 service nova] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Received event network-vif-plugged-9e56e6e1-9271-4b63-8a7f-5dbe0ed9cb1c {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1495.248637] env[63379]: DEBUG oslo_concurrency.lockutils [req-afe02dc7-180e-4d70-b3a0-3bf2e98e5ac7 req-c00c719c-39f4-4c59-a4eb-59da55a45a43 service nova] Acquiring lock "a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1495.249517] env[63379]: DEBUG oslo_concurrency.lockutils [req-afe02dc7-180e-4d70-b3a0-3bf2e98e5ac7 req-c00c719c-39f4-4c59-a4eb-59da55a45a43 service nova] Lock "a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1495.249517] env[63379]: DEBUG oslo_concurrency.lockutils [req-afe02dc7-180e-4d70-b3a0-3bf2e98e5ac7 req-c00c719c-39f4-4c59-a4eb-59da55a45a43 service nova] Lock "a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1495.250368] env[63379]: DEBUG nova.compute.manager [req-afe02dc7-180e-4d70-b3a0-3bf2e98e5ac7 req-c00c719c-39f4-4c59-a4eb-59da55a45a43 service nova] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] No waiting events found dispatching 
network-vif-plugged-9e56e6e1-9271-4b63-8a7f-5dbe0ed9cb1c {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1495.250368] env[63379]: WARNING nova.compute.manager [req-afe02dc7-180e-4d70-b3a0-3bf2e98e5ac7 req-c00c719c-39f4-4c59-a4eb-59da55a45a43 service nova] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Received unexpected event network-vif-plugged-9e56e6e1-9271-4b63-8a7f-5dbe0ed9cb1c for instance with vm_state building and task_state block_device_mapping. [ 1495.314487] env[63379]: DEBUG nova.network.neutron [req-1825caea-7fc6-4e09-bd6a-3f87b09f852c req-356041d1-26bc-4c7a-bcc3-25ede65449ef service nova] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Updated VIF entry in instance network info cache for port 4b39f7fe-6ef6-4804-b4b1-102adc940d55. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1495.314880] env[63379]: DEBUG nova.network.neutron [req-1825caea-7fc6-4e09-bd6a-3f87b09f852c req-356041d1-26bc-4c7a-bcc3-25ede65449ef service nova] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Updating instance_info_cache with network_info: [{"id": "4b39f7fe-6ef6-4804-b4b1-102adc940d55", "address": "fa:16:3e:f6:77:3e", "network": {"id": "55f3848c-4d4f-4c83-a3e6-bc7a6f7af3ce", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eb95d75934bc4912a35f709406a98a65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b39f7fe-6e", "ovs_interfaceid": "4b39f7fe-6ef6-4804-b4b1-102adc940d55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1495.357348] env[63379]: DEBUG nova.network.neutron [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Successfully updated port: 9e56e6e1-9271-4b63-8a7f-5dbe0ed9cb1c {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1495.501802] env[63379]: DEBUG oslo_vmware.api [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Task: {'id': task-1779243, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.522457} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.502177] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 07cc8cd7-8368-41dd-ae13-01c8275cac9e/07cc8cd7-8368-41dd-ae13-01c8275cac9e.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1495.502439] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1495.502751] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d7a2c9fc-9800-443e-9dcf-ac10bbf9fe9d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.511047] env[63379]: DEBUG oslo_vmware.api [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Waiting for the task: (returnval){ [ 1495.511047] env[63379]: value = "task-1779251" [ 1495.511047] env[63379]: _type = "Task" [ 1495.511047] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.518983] env[63379]: DEBUG oslo_vmware.api [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Task: {'id': task-1779251, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.578565] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Acquiring lock "6e022c9a-642b-4d96-8195-e56809bbd7b9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1495.578565] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Lock "6e022c9a-642b-4d96-8195-e56809bbd7b9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1495.578565] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779248, 'name': CreateVM_Task, 'duration_secs': 0.376871} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.578742] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1495.579399] env[63379]: DEBUG oslo_concurrency.lockutils [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1495.579399] env[63379]: DEBUG oslo_concurrency.lockutils [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1495.579701] env[63379]: DEBUG oslo_concurrency.lockutils [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1495.580205] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8071c06e-7a2a-4a6d-81e3-ea97b020a9e0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.586045] env[63379]: DEBUG oslo_vmware.api [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Waiting for the task: (returnval){ [ 1495.586045] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52461f3b-d0b5-fc52-a1d1-24b9af6d8784" [ 1495.586045] env[63379]: _type = "Task" [ 1495.586045] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.594162] env[63379]: DEBUG oslo_vmware.api [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52461f3b-d0b5-fc52-a1d1-24b9af6d8784, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.624568] env[63379]: DEBUG oslo_vmware.api [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Task: {'id': task-1779249, 'name': ReconfigVM_Task, 'duration_secs': 0.403484} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.624874] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Reconfigured VM instance instance-00000022 to attach disk [datastore1] a78feafb-00bc-44c4-acd3-a36fb8a81767/a78feafb-00bc-44c4-acd3-a36fb8a81767.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1495.625501] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9a4793ec-2f92-4db7-9bcf-d4654674549e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.631964] env[63379]: DEBUG oslo_vmware.api [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Waiting for the task: (returnval){ [ 1495.631964] env[63379]: value = "task-1779252" [ 1495.631964] env[63379]: _type = "Task" [ 1495.631964] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.638142] env[63379]: DEBUG oslo_vmware.api [None req-c3182f00-aa4c-4c66-93f9-b2195935bd42 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Task: {'id': task-1779250, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.229418} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.638668] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3182f00-aa4c-4c66-93f9-b2195935bd42 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1495.639412] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c3182f00-aa4c-4c66-93f9-b2195935bd42 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1495.639412] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c3182f00-aa4c-4c66-93f9-b2195935bd42 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1495.639412] env[63379]: INFO nova.compute.manager [None req-c3182f00-aa4c-4c66-93f9-b2195935bd42 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1495.639412] env[63379]: DEBUG oslo.service.loopingcall [None req-c3182f00-aa4c-4c66-93f9-b2195935bd42 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
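The records above trace the full teardown of instance bc7baa1a-f65d-41d4-ad86-de041fbb2306: power off, unregister, delete the datastore files, then hand off to Neutron for port deallocation. Written out as a schematic sequence; the function bodies below are print placeholders for the vmops/ds_util/neutron calls named in the DEBUG lines, not real Nova APIs:

    def power_off(vm): print("PowerOffVM_Task for", vm)
    def unregister(vm): print("UnregisterVM for", vm)
    def delete_datastore_files(vm): print("DeleteDatastoreFile_Task for", vm)
    def deallocate_network(vm): print("deallocate_for_instance() for", vm)

    def destroy_instance(vm):
        power_off(vm)               # VirtualMachine.PowerOffVM_Task
        unregister(vm)              # VirtualMachine.UnregisterVM
        delete_datastore_files(vm)  # FileManager.DeleteDatastoreFile_Task
        deallocate_network(vm)      # neutron deallocate_for_instance()

    destroy_instance("bc7baa1a-f65d-41d4-ad86-de041fbb2306")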
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1495.639590] env[63379]: DEBUG nova.compute.manager [-] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1495.639782] env[63379]: DEBUG nova.network.neutron [-] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1495.644196] env[63379]: DEBUG oslo_vmware.api [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Task: {'id': task-1779252, 'name': Rename_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.703212] env[63379]: DEBUG nova.compute.utils [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1495.704585] env[63379]: DEBUG nova.compute.manager [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1495.704816] env[63379]: DEBUG nova.network.neutron [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1495.762313] env[63379]: DEBUG nova.policy [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '43f89be3665844e28cb9e4675f712d64', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c9a0f0e3c07545b889ed575e21b131dc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1495.817317] env[63379]: DEBUG oslo_concurrency.lockutils [req-1825caea-7fc6-4e09-bd6a-3f87b09f852c req-356041d1-26bc-4c7a-bcc3-25ede65449ef service nova] Releasing lock "refresh_cache-ee36cc5f-61a1-4e4f-9cae-670f5868d90c" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1495.865385] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Acquiring lock "refresh_cache-a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1495.865548] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 
tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Acquired lock "refresh_cache-a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1495.865722] env[63379]: DEBUG nova.network.neutron [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1496.020881] env[63379]: DEBUG oslo_vmware.api [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Task: {'id': task-1779251, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068733} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.021778] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1496.022611] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47a971d8-22b0-494e-b317-4ce75c96a06b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.051059] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Reconfiguring VM instance instance-00000021 to attach disk [datastore1] 07cc8cd7-8368-41dd-ae13-01c8275cac9e/07cc8cd7-8368-41dd-ae13-01c8275cac9e.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1496.052963] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f9c33929-c959-4c8c-9f27-601752e53f4c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.068631] env[63379]: DEBUG nova.network.neutron [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Successfully created port: da9aa440-961a-44c6-95bd-7e4d31987617 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1496.076482] env[63379]: DEBUG oslo_vmware.api [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Waiting for the task: (returnval){ [ 1496.076482] env[63379]: value = "task-1779253" [ 1496.076482] env[63379]: _type = "Task" [ 1496.076482] env[63379]: } to complete. 
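The nova.policy record a little above, where the network:attach_external_network check fails for a member/reader token, is a plain oslo.policy enforcement against the request credentials. A minimal standalone sketch of that kind of check; the "role:admin" check string and the trimmed credentials dict are assumptions for illustration, not Nova's actual policy defaults:

    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.CONF)
    enforcer.register_default(
        policy.RuleDefault("network:attach_external_network", "role:admin"))

    creds = {"roles": ["member", "reader"],
             "project_id": "c9a0f0e3c07545b889ed575e21b131dc"}
    # A member/reader token fails an admin-only check, matching the
    # "Policy check ... failed" DEBUG line above.
    print(enforcer.enforce("network:attach_external_network", {}, creds))  # False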
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.085406] env[63379]: DEBUG oslo_vmware.api [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Task: {'id': task-1779253, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.096025] env[63379]: DEBUG oslo_vmware.api [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52461f3b-d0b5-fc52-a1d1-24b9af6d8784, 'name': SearchDatastore_Task, 'duration_secs': 0.008494} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.096025] env[63379]: DEBUG oslo_concurrency.lockutils [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1496.096185] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1496.096403] env[63379]: DEBUG oslo_concurrency.lockutils [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1496.096558] env[63379]: DEBUG oslo_concurrency.lockutils [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1496.096745] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1496.097017] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6fdda7a0-fd36-44bf-b63f-42fbe0da3d9c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.105676] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Created directory with path [datastore1] devstack-image-cache_base 
{{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1496.105970] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1496.106753] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-741df9cf-a5c9-414c-ae54-f78f2d057abe {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.112282] env[63379]: DEBUG oslo_vmware.api [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Waiting for the task: (returnval){ [ 1496.112282] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b2de15-56c4-9544-6a03-88b03878a4f2" [ 1496.112282] env[63379]: _type = "Task" [ 1496.112282] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.120848] env[63379]: DEBUG oslo_vmware.api [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b2de15-56c4-9544-6a03-88b03878a4f2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.142760] env[63379]: DEBUG oslo_vmware.api [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Task: {'id': task-1779252, 'name': Rename_Task, 'duration_secs': 0.146112} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.143300] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1496.143591] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4143edb6-2d8d-4289-a97f-e738800c9a61 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.150649] env[63379]: DEBUG oslo_vmware.api [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Waiting for the task: (returnval){ [ 1496.150649] env[63379]: value = "task-1779254" [ 1496.150649] env[63379]: _type = "Task" [ 1496.150649] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.160027] env[63379]: DEBUG oslo_vmware.api [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Task: {'id': task-1779254, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.209029] env[63379]: DEBUG nova.compute.manager [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1496.312979] env[63379]: DEBUG nova.compute.manager [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1496.313561] env[63379]: DEBUG nova.virt.hardware [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1496.313766] env[63379]: DEBUG nova.virt.hardware [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1496.313969] env[63379]: DEBUG nova.virt.hardware [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1496.314195] env[63379]: DEBUG nova.virt.hardware [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1496.314353] env[63379]: DEBUG nova.virt.hardware [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1496.314483] env[63379]: DEBUG nova.virt.hardware [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1496.314683] env[63379]: DEBUG nova.virt.hardware [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 
tempest-ServerActionsV293TestJSON-711435588-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1496.314872] env[63379]: DEBUG nova.virt.hardware [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1496.315474] env[63379]: DEBUG nova.virt.hardware [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1496.315753] env[63379]: DEBUG nova.virt.hardware [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1496.315993] env[63379]: DEBUG nova.virt.hardware [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1496.317758] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8feb4200-d816-4f5f-a878-cd376f23e4bd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.333142] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0221ec1d-20f8-484d-ba6f-90394c3b153b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.403857] env[63379]: DEBUG nova.network.neutron [-] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1496.439281] env[63379]: DEBUG nova.network.neutron [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1496.590074] env[63379]: DEBUG oslo_vmware.api [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Task: {'id': task-1779253, 'name': ReconfigVM_Task, 'duration_secs': 0.338063} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.590954] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Reconfigured VM instance instance-00000021 to attach disk [datastore1] 07cc8cd7-8368-41dd-ae13-01c8275cac9e/07cc8cd7-8368-41dd-ae13-01c8275cac9e.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1496.592112] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7c75cdf4-a981-48d8-bf62-cd54423fdf77 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.598805] env[63379]: DEBUG nova.network.neutron [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Updating instance_info_cache with network_info: [{"id": "9e56e6e1-9271-4b63-8a7f-5dbe0ed9cb1c", "address": "fa:16:3e:78:f3:2f", "network": {"id": "e4e5aea3-500f-45e8-b507-11a67a599a0a", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-2078307430-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53f21d581df140adb2012ea248c39a1f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e56e6e1-92", "ovs_interfaceid": "9e56e6e1-9271-4b63-8a7f-5dbe0ed9cb1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1496.601292] env[63379]: DEBUG oslo_vmware.api [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Waiting for the task: (returnval){ [ 1496.601292] env[63379]: value = "task-1779255" [ 1496.601292] env[63379]: _type = "Task" [ 1496.601292] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.612744] env[63379]: DEBUG oslo_vmware.api [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Task: {'id': task-1779255, 'name': Rename_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.630459] env[63379]: DEBUG oslo_vmware.api [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b2de15-56c4-9544-6a03-88b03878a4f2, 'name': SearchDatastore_Task, 'duration_secs': 0.008923} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.631483] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c43b8993-a6a6-4907-b2ba-ad5bec03d9d9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.640568] env[63379]: DEBUG oslo_vmware.api [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Waiting for the task: (returnval){ [ 1496.640568] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52fa20e3-697b-ac05-0fd2-5658ddd7d9ed" [ 1496.640568] env[63379]: _type = "Task" [ 1496.640568] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.649354] env[63379]: DEBUG oslo_vmware.api [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52fa20e3-697b-ac05-0fd2-5658ddd7d9ed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.660503] env[63379]: DEBUG oslo_vmware.api [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Task: {'id': task-1779254, 'name': PowerOnVM_Task, 'duration_secs': 0.471414} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.660772] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1496.661116] env[63379]: INFO nova.compute.manager [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Took 7.73 seconds to spawn the instance on the hypervisor. 
[ 1496.661209] env[63379]: DEBUG nova.compute.manager [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1496.661938] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aecdb7bf-7357-4571-bcce-71b5859d020d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.770158] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c1fb7c0-50be-4799-97be-f5b3124253cc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.777218] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99262810-0396-4ded-a5e9-abf4f591918c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.807973] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-465a01c1-ca38-491b-b406-d368c8f04411 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.816562] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d41b385a-5233-4cb3-8ed2-d288b6a03687 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.830153] env[63379]: DEBUG nova.compute.provider_tree [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1496.906539] env[63379]: INFO nova.compute.manager [-] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Took 1.27 seconds to deallocate network for instance. 
[ 1497.102246] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Releasing lock "refresh_cache-a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1497.102577] env[63379]: DEBUG nova.compute.manager [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Instance network_info: |[{"id": "9e56e6e1-9271-4b63-8a7f-5dbe0ed9cb1c", "address": "fa:16:3e:78:f3:2f", "network": {"id": "e4e5aea3-500f-45e8-b507-11a67a599a0a", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-2078307430-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53f21d581df140adb2012ea248c39a1f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e56e6e1-92", "ovs_interfaceid": "9e56e6e1-9271-4b63-8a7f-5dbe0ed9cb1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1497.103188] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:78:f3:2f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'abe48956-848a-4e1f-b1f1-a27baa5390b9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9e56e6e1-9271-4b63-8a7f-5dbe0ed9cb1c', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1497.110491] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Creating folder: Project (53f21d581df140adb2012ea248c39a1f). Parent ref: group-v369214. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1497.113779] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8ff3a41d-387a-4dbb-af2c-e40357d574de {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.120523] env[63379]: DEBUG oslo_vmware.api [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Task: {'id': task-1779255, 'name': Rename_Task, 'duration_secs': 0.173666} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.120839] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1497.122273] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-91517d43-bc44-46a0-91c9-61e6a98dfe71 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.125690] env[63379]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1497.125764] env[63379]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=63379) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1497.126054] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Folder already exists: Project (53f21d581df140adb2012ea248c39a1f). Parent ref: group-v369214. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1497.126240] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Creating folder: Instances. Parent ref: group-v369278. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1497.127366] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8b4fa751-7976-439f-bc49-aac3147fc01a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.128835] env[63379]: DEBUG oslo_vmware.api [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Waiting for the task: (returnval){ [ 1497.128835] env[63379]: value = "task-1779257" [ 1497.128835] env[63379]: _type = "Task" [ 1497.128835] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.136789] env[63379]: DEBUG oslo_vmware.api [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Task: {'id': task-1779257, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.140101] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Created folder: Instances in parent group-v369278. [ 1497.140349] env[63379]: DEBUG oslo.service.loopingcall [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1497.140546] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1497.140774] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2677d1ef-9368-4cd8-bbca-98dd2816f080 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.164199] env[63379]: DEBUG oslo_vmware.api [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52fa20e3-697b-ac05-0fd2-5658ddd7d9ed, 'name': SearchDatastore_Task, 'duration_secs': 0.01086} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.166071] env[63379]: DEBUG oslo_concurrency.lockutils [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1497.166482] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] ee36cc5f-61a1-4e4f-9cae-670f5868d90c/ee36cc5f-61a1-4e4f-9cae-670f5868d90c.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1497.166826] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1497.166826] env[63379]: value = "task-1779259" [ 1497.166826] env[63379]: _type = "Task" [ 1497.166826] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.167102] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-90563a6c-f5bc-462a-b611-c64eb525d9f1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.182188] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779259, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.186591] env[63379]: DEBUG oslo_vmware.api [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Waiting for the task: (returnval){ [ 1497.186591] env[63379]: value = "task-1779260" [ 1497.186591] env[63379]: _type = "Task" [ 1497.186591] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.190454] env[63379]: INFO nova.compute.manager [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Took 55.59 seconds to build instance. [ 1497.196785] env[63379]: DEBUG oslo_vmware.api [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779260, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.224568] env[63379]: DEBUG nova.compute.manager [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1497.252875] env[63379]: DEBUG nova.virt.hardware [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1497.253302] env[63379]: DEBUG nova.virt.hardware [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1497.253583] env[63379]: DEBUG nova.virt.hardware [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1497.253904] env[63379]: DEBUG nova.virt.hardware [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1497.254194] env[63379]: DEBUG nova.virt.hardware [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1497.254469] env[63379]: DEBUG nova.virt.hardware [None 
req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1497.254810] env[63379]: DEBUG nova.virt.hardware [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1497.255181] env[63379]: DEBUG nova.virt.hardware [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1497.255282] env[63379]: DEBUG nova.virt.hardware [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1497.255435] env[63379]: DEBUG nova.virt.hardware [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1497.255622] env[63379]: DEBUG nova.virt.hardware [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1497.256518] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dd9e0b7-4fcb-4e35-a690-8056def868c6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.264702] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50429912-d56f-4714-91cb-fc038ac0c443 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.284288] env[63379]: DEBUG nova.compute.manager [req-31adfaca-8039-4103-98fa-a3cd5f5fae7d req-69c07da3-33cd-4190-8b07-ce9145e3db23 service nova] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Received event network-changed-9e56e6e1-9271-4b63-8a7f-5dbe0ed9cb1c {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1497.284490] env[63379]: DEBUG nova.compute.manager [req-31adfaca-8039-4103-98fa-a3cd5f5fae7d req-69c07da3-33cd-4190-8b07-ce9145e3db23 service nova] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Refreshing instance network info cache due to event network-changed-9e56e6e1-9271-4b63-8a7f-5dbe0ed9cb1c. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1497.284762] env[63379]: DEBUG oslo_concurrency.lockutils [req-31adfaca-8039-4103-98fa-a3cd5f5fae7d req-69c07da3-33cd-4190-8b07-ce9145e3db23 service nova] Acquiring lock "refresh_cache-a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1497.284888] env[63379]: DEBUG oslo_concurrency.lockutils [req-31adfaca-8039-4103-98fa-a3cd5f5fae7d req-69c07da3-33cd-4190-8b07-ce9145e3db23 service nova] Acquired lock "refresh_cache-a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1497.285075] env[63379]: DEBUG nova.network.neutron [req-31adfaca-8039-4103-98fa-a3cd5f5fae7d req-69c07da3-33cd-4190-8b07-ce9145e3db23 service nova] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Refreshing network info cache for port 9e56e6e1-9271-4b63-8a7f-5dbe0ed9cb1c {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1497.333252] env[63379]: DEBUG nova.scheduler.client.report [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1497.412640] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c3182f00-aa4c-4c66-93f9-b2195935bd42 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1497.642859] env[63379]: DEBUG oslo_vmware.api [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Task: {'id': task-1779257, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.682383] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779259, 'name': CreateVM_Task, 'duration_secs': 0.3962} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.682576] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1497.684420] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'guest_format': None, 'device_type': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369286', 'volume_id': '084d5362-d8e9-4034-9623-555ed06a1add', 'name': 'volume-084d5362-d8e9-4034-9623-555ed06a1add', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6', 'attached_at': '', 'detached_at': '', 'volume_id': '084d5362-d8e9-4034-9623-555ed06a1add', 'serial': '084d5362-d8e9-4034-9623-555ed06a1add'}, 'attachment_id': '9f8d0004-99f3-42fe-9b2d-a4481cb33675', 'boot_index': 0, 'mount_device': '/dev/sda', 'disk_bus': None, 'delete_on_termination': True, 'volume_type': None}], 'swap': None} {{(pid=63379) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1497.684695] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Root volume attach. Driver type: vmdk {{(pid=63379) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1497.686029] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-689f8b79-f902-4dd6-8879-36ea0553a791 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.693424] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f330eb17-cc85-4fe6-b482-23223731e8ae tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Lock "a78feafb-00bc-44c4-acd3-a36fb8a81767" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.454s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1497.709890] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47dc1815-8a8a-4552-9268-d03c33fa1ffb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.712851] env[63379]: DEBUG oslo_vmware.api [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779260, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.720041] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3f96486-7395-4972-951e-6fe740d9ae0e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.737655] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-4e3721fe-c437-4830-a464-8fd0e3d8a935 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.750725] env[63379]: DEBUG oslo_vmware.api [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Waiting for the task: (returnval){ [ 1497.750725] env[63379]: value = "task-1779261" [ 1497.750725] env[63379]: _type = "Task" [ 1497.750725] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.767190] env[63379]: DEBUG oslo_vmware.api [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Task: {'id': task-1779261, 'name': RelocateVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.799170] env[63379]: DEBUG nova.compute.manager [req-e28b651d-ebea-476c-bc97-991343f97d39 req-4f5d1d9a-6517-487b-b84d-baa6040c3808 service nova] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Received event network-vif-plugged-da9aa440-961a-44c6-95bd-7e4d31987617 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1497.799277] env[63379]: DEBUG oslo_concurrency.lockutils [req-e28b651d-ebea-476c-bc97-991343f97d39 req-4f5d1d9a-6517-487b-b84d-baa6040c3808 service nova] Acquiring lock "aa44a4ff-14e5-42d2-a082-06fe0ae9646c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1497.799544] env[63379]: DEBUG oslo_concurrency.lockutils [req-e28b651d-ebea-476c-bc97-991343f97d39 req-4f5d1d9a-6517-487b-b84d-baa6040c3808 service nova] Lock "aa44a4ff-14e5-42d2-a082-06fe0ae9646c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1497.799670] env[63379]: DEBUG oslo_concurrency.lockutils [req-e28b651d-ebea-476c-bc97-991343f97d39 req-4f5d1d9a-6517-487b-b84d-baa6040c3808 service nova] Lock "aa44a4ff-14e5-42d2-a082-06fe0ae9646c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1497.799844] env[63379]: DEBUG nova.compute.manager [req-e28b651d-ebea-476c-bc97-991343f97d39 req-4f5d1d9a-6517-487b-b84d-baa6040c3808 service nova] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] No waiting events found dispatching network-vif-plugged-da9aa440-961a-44c6-95bd-7e4d31987617 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1497.800314] env[63379]: WARNING nova.compute.manager [req-e28b651d-ebea-476c-bc97-991343f97d39 
req-4f5d1d9a-6517-487b-b84d-baa6040c3808 service nova] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Received unexpected event network-vif-plugged-da9aa440-961a-44c6-95bd-7e4d31987617 for instance with vm_state building and task_state spawning. [ 1497.840369] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.646s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1497.840489] env[63379]: DEBUG nova.compute.manager [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1497.848308] env[63379]: DEBUG nova.network.neutron [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Successfully updated port: da9aa440-961a-44c6-95bd-7e4d31987617 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1497.857182] env[63379]: DEBUG oslo_concurrency.lockutils [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.987s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1497.858488] env[63379]: INFO nova.compute.claims [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1498.000682] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5f802eb7-212c-4a84-a176-3caa01b611b4 tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Acquiring lock "a78feafb-00bc-44c4-acd3-a36fb8a81767" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1498.001055] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5f802eb7-212c-4a84-a176-3caa01b611b4 tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Lock "a78feafb-00bc-44c4-acd3-a36fb8a81767" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1498.001420] env[63379]: INFO nova.compute.manager [None req-5f802eb7-212c-4a84-a176-3caa01b611b4 tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Rebooting instance [ 1498.083042] env[63379]: DEBUG nova.network.neutron [req-31adfaca-8039-4103-98fa-a3cd5f5fae7d req-69c07da3-33cd-4190-8b07-ce9145e3db23 service nova] [instance: 
a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Updated VIF entry in instance network info cache for port 9e56e6e1-9271-4b63-8a7f-5dbe0ed9cb1c. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1498.083441] env[63379]: DEBUG nova.network.neutron [req-31adfaca-8039-4103-98fa-a3cd5f5fae7d req-69c07da3-33cd-4190-8b07-ce9145e3db23 service nova] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Updating instance_info_cache with network_info: [{"id": "9e56e6e1-9271-4b63-8a7f-5dbe0ed9cb1c", "address": "fa:16:3e:78:f3:2f", "network": {"id": "e4e5aea3-500f-45e8-b507-11a67a599a0a", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-2078307430-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53f21d581df140adb2012ea248c39a1f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e56e6e1-92", "ovs_interfaceid": "9e56e6e1-9271-4b63-8a7f-5dbe0ed9cb1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1498.140689] env[63379]: DEBUG oslo_vmware.api [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Task: {'id': task-1779257, 'name': PowerOnVM_Task, 'duration_secs': 0.581247} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.140963] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1498.141412] env[63379]: INFO nova.compute.manager [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Took 14.66 seconds to spawn the instance on the hypervisor. 
[ 1498.141487] env[63379]: DEBUG nova.compute.manager [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1498.142287] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83b4f358-1793-4e5e-931d-8bfab5b7589a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.196293] env[63379]: DEBUG nova.compute.manager [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1498.202324] env[63379]: DEBUG oslo_vmware.api [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779260, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.634734} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.202765] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] ee36cc5f-61a1-4e4f-9cae-670f5868d90c/ee36cc5f-61a1-4e4f-9cae-670f5868d90c.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1498.202987] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1498.203254] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2570a01c-6e84-442d-b2af-f951cbfa4339 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.210253] env[63379]: DEBUG oslo_vmware.api [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Waiting for the task: (returnval){ [ 1498.210253] env[63379]: value = "task-1779262" [ 1498.210253] env[63379]: _type = "Task" [ 1498.210253] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.220273] env[63379]: DEBUG oslo_vmware.api [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779262, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.271802] env[63379]: DEBUG oslo_vmware.api [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Task: {'id': task-1779261, 'name': RelocateVM_Task} progress is 19%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.362862] env[63379]: DEBUG oslo_concurrency.lockutils [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Acquiring lock "refresh_cache-aa44a4ff-14e5-42d2-a082-06fe0ae9646c" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1498.363052] env[63379]: DEBUG oslo_concurrency.lockutils [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Acquired lock "refresh_cache-aa44a4ff-14e5-42d2-a082-06fe0ae9646c" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1498.363243] env[63379]: DEBUG nova.network.neutron [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1498.365408] env[63379]: DEBUG nova.compute.utils [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1498.368580] env[63379]: DEBUG nova.compute.manager [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1498.368783] env[63379]: DEBUG nova.network.neutron [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1498.409175] env[63379]: DEBUG nova.policy [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ebba7dcc5e6a48f4865a165c4554128a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7fadf3c8628840efb6c8f6f99df21694', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1498.525963] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5f802eb7-212c-4a84-a176-3caa01b611b4 tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Acquiring lock "refresh_cache-a78feafb-00bc-44c4-acd3-a36fb8a81767" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1498.526184] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5f802eb7-212c-4a84-a176-3caa01b611b4 tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Acquired lock "refresh_cache-a78feafb-00bc-44c4-acd3-a36fb8a81767" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1498.526363] env[63379]: DEBUG nova.network.neutron [None req-5f802eb7-212c-4a84-a176-3caa01b611b4 tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1498.586223] env[63379]: DEBUG oslo_concurrency.lockutils [req-31adfaca-8039-4103-98fa-a3cd5f5fae7d req-69c07da3-33cd-4190-8b07-ce9145e3db23 service nova] Releasing lock "refresh_cache-a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1498.586492] env[63379]: DEBUG nova.compute.manager [req-31adfaca-8039-4103-98fa-a3cd5f5fae7d req-69c07da3-33cd-4190-8b07-ce9145e3db23 service nova] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Received event network-vif-deleted-73ef0a69-1fcf-4176-8fc8-5a95ef6add57 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1498.660030] env[63379]: INFO nova.compute.manager [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Took 60.37 seconds to build instance. 
[ 1498.700980] env[63379]: DEBUG nova.network.neutron [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Successfully created port: d994b910-f078-4d71-a9e5-f3177a54dfef {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1498.723552] env[63379]: DEBUG oslo_vmware.api [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779262, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.115101} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.723552] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1498.723552] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e586fa0-2c3c-456e-aead-e1e641a57061 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.746358] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Reconfiguring VM instance instance-00000023 to attach disk [datastore1] ee36cc5f-61a1-4e4f-9cae-670f5868d90c/ee36cc5f-61a1-4e4f-9cae-670f5868d90c.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1498.747478] env[63379]: DEBUG oslo_concurrency.lockutils [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1498.747717] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-253010b7-a590-4bd8-8e9b-aaef5dd92a05 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.771665] env[63379]: DEBUG oslo_vmware.api [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Task: {'id': task-1779261, 'name': RelocateVM_Task} progress is 20%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.772941] env[63379]: DEBUG oslo_vmware.api [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Waiting for the task: (returnval){ [ 1498.772941] env[63379]: value = "task-1779263" [ 1498.772941] env[63379]: _type = "Task" [ 1498.772941] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.780492] env[63379]: DEBUG oslo_vmware.api [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779263, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.873248] env[63379]: DEBUG nova.compute.manager [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1498.928896] env[63379]: DEBUG nova.network.neutron [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1499.161989] env[63379]: DEBUG oslo_concurrency.lockutils [None req-14336e09-e88c-45c8-b77c-28ae41bc9699 tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Lock "07cc8cd7-8368-41dd-ae13-01c8275cac9e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.398s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1499.197907] env[63379]: DEBUG nova.network.neutron [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Updating instance_info_cache with network_info: [{"id": "da9aa440-961a-44c6-95bd-7e4d31987617", "address": "fa:16:3e:d1:9c:1c", "network": {"id": "55f3848c-4d4f-4c83-a3e6-bc7a6f7af3ce", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.90", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eb95d75934bc4912a35f709406a98a65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda9aa440-96", "ovs_interfaceid": "da9aa440-961a-44c6-95bd-7e4d31987617", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1499.275766] env[63379]: DEBUG oslo_vmware.api [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Task: {'id': task-1779261, 'name': RelocateVM_Task, 'duration_secs': 1.144355} completed 
successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.279167] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Volume attach. Driver type: vmdk {{(pid=63379) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1499.279379] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369286', 'volume_id': '084d5362-d8e9-4034-9623-555ed06a1add', 'name': 'volume-084d5362-d8e9-4034-9623-555ed06a1add', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6', 'attached_at': '', 'detached_at': '', 'volume_id': '084d5362-d8e9-4034-9623-555ed06a1add', 'serial': '084d5362-d8e9-4034-9623-555ed06a1add'} {{(pid=63379) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1499.280178] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cd5dfae-c8cb-4a53-92a0-92c9ff8e4f25 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.291603] env[63379]: DEBUG oslo_vmware.api [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779263, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.307552] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73f836c8-4524-4bd7-a4d0-9e887771cdd4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.335419] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Reconfiguring VM instance instance-00000024 to attach disk [datastore1] volume-084d5362-d8e9-4034-9623-555ed06a1add/volume-084d5362-d8e9-4034-9623-555ed06a1add.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1499.338518] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cbee18b6-824a-4d47-be8f-da73c860dbaa {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.358733] env[63379]: DEBUG oslo_vmware.api [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Waiting for the task: (returnval){ [ 1499.358733] env[63379]: value = "task-1779264" [ 1499.358733] env[63379]: _type = "Task" [ 1499.358733] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.369874] env[63379]: DEBUG oslo_vmware.api [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Task: {'id': task-1779264, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.423313] env[63379]: DEBUG nova.network.neutron [None req-5f802eb7-212c-4a84-a176-3caa01b611b4 tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Updating instance_info_cache with network_info: [{"id": "5c6da110-b3ed-4065-94b0-004b98fd1363", "address": "fa:16:3e:63:c5:8b", "network": {"id": "db8165e6-d149-447f-8124-dd0f145b95ee", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1894207473-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a35cb189afd489591fe9d6e85640d4a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35fcdc55-dc29-451b-ad56-3a03b044dc81", "external-id": "nsx-vlan-transportzone-552", "segmentation_id": 552, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c6da110-b3", "ovs_interfaceid": "5c6da110-b3ed-4065-94b0-004b98fd1363", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1499.524169] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cd1bb05-fd0b-4f19-8872-334d969729be {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.532314] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07778843-46f3-427f-93d2-a13cb02f2f00 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.565591] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a47439a0-98da-424f-846c-045d8ed7d710 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.572918] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b18eedea-bc5d-4582-b5bd-4acea21f2bdf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.587118] env[63379]: DEBUG nova.compute.provider_tree [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1499.623282] env[63379]: DEBUG 
oslo_concurrency.lockutils [None req-df8ea99e-33f0-4daa-b1e2-1115c27c15ba tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Acquiring lock "07cc8cd7-8368-41dd-ae13-01c8275cac9e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1499.623562] env[63379]: DEBUG oslo_concurrency.lockutils [None req-df8ea99e-33f0-4daa-b1e2-1115c27c15ba tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Lock "07cc8cd7-8368-41dd-ae13-01c8275cac9e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1499.623888] env[63379]: DEBUG oslo_concurrency.lockutils [None req-df8ea99e-33f0-4daa-b1e2-1115c27c15ba tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Acquiring lock "07cc8cd7-8368-41dd-ae13-01c8275cac9e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1499.623974] env[63379]: DEBUG oslo_concurrency.lockutils [None req-df8ea99e-33f0-4daa-b1e2-1115c27c15ba tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Lock "07cc8cd7-8368-41dd-ae13-01c8275cac9e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1499.624173] env[63379]: DEBUG oslo_concurrency.lockutils [None req-df8ea99e-33f0-4daa-b1e2-1115c27c15ba tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Lock "07cc8cd7-8368-41dd-ae13-01c8275cac9e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1499.628995] env[63379]: INFO nova.compute.manager [None req-df8ea99e-33f0-4daa-b1e2-1115c27c15ba tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Terminating instance [ 1499.630815] env[63379]: DEBUG nova.compute.manager [None req-df8ea99e-33f0-4daa-b1e2-1115c27c15ba tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1499.631017] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-df8ea99e-33f0-4daa-b1e2-1115c27c15ba tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1499.631864] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22424139-d45c-4b5c-8feb-2b73dbe9d41a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.639628] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-df8ea99e-33f0-4daa-b1e2-1115c27c15ba tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1499.639875] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4609016a-89c2-42e8-a3a6-158e68dd3e8a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.647038] env[63379]: DEBUG oslo_vmware.api [None req-df8ea99e-33f0-4daa-b1e2-1115c27c15ba tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Waiting for the task: (returnval){ [ 1499.647038] env[63379]: value = "task-1779265" [ 1499.647038] env[63379]: _type = "Task" [ 1499.647038] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.654924] env[63379]: DEBUG oslo_vmware.api [None req-df8ea99e-33f0-4daa-b1e2-1115c27c15ba tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Task: {'id': task-1779265, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.671063] env[63379]: DEBUG nova.compute.manager [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Starting instance... 
{{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1499.701970] env[63379]: DEBUG oslo_concurrency.lockutils [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Releasing lock "refresh_cache-aa44a4ff-14e5-42d2-a082-06fe0ae9646c" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1499.702441] env[63379]: DEBUG nova.compute.manager [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Instance network_info: |[{"id": "da9aa440-961a-44c6-95bd-7e4d31987617", "address": "fa:16:3e:d1:9c:1c", "network": {"id": "55f3848c-4d4f-4c83-a3e6-bc7a6f7af3ce", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.90", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eb95d75934bc4912a35f709406a98a65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda9aa440-96", "ovs_interfaceid": "da9aa440-961a-44c6-95bd-7e4d31987617", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1499.703050] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d1:9c:1c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea00b53a-9c9b-4592-ab95-7e10473f338d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'da9aa440-961a-44c6-95bd-7e4d31987617', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1499.712064] env[63379]: DEBUG oslo.service.loopingcall [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1499.712969] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1499.713451] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-34b40987-a111-427b-8acf-ff3dd422caa1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.734894] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1499.734894] env[63379]: value = "task-1779266" [ 1499.734894] env[63379]: _type = "Task" [ 1499.734894] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.742987] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779266, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.787128] env[63379]: DEBUG oslo_vmware.api [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779263, 'name': ReconfigVM_Task, 'duration_secs': 0.61662} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.787449] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Reconfigured VM instance instance-00000023 to attach disk [datastore1] ee36cc5f-61a1-4e4f-9cae-670f5868d90c/ee36cc5f-61a1-4e4f-9cae-670f5868d90c.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1499.788138] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4dd2404e-ad64-4024-84bd-8734e84ac716 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.796299] env[63379]: DEBUG oslo_vmware.api [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Waiting for the task: (returnval){ [ 1499.796299] env[63379]: value = "task-1779267" [ 1499.796299] env[63379]: _type = "Task" [ 1499.796299] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.806602] env[63379]: DEBUG oslo_vmware.api [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779267, 'name': Rename_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.829980] env[63379]: DEBUG nova.compute.manager [req-98ec822e-af57-4d9c-8cd7-c2a1d52d3332 req-3ce5ec3c-661e-4ffb-9f67-e5027e5e0c75 service nova] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Received event network-changed-da9aa440-961a-44c6-95bd-7e4d31987617 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1499.830317] env[63379]: DEBUG nova.compute.manager [req-98ec822e-af57-4d9c-8cd7-c2a1d52d3332 req-3ce5ec3c-661e-4ffb-9f67-e5027e5e0c75 service nova] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Refreshing instance network info cache due to event network-changed-da9aa440-961a-44c6-95bd-7e4d31987617. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1499.830686] env[63379]: DEBUG oslo_concurrency.lockutils [req-98ec822e-af57-4d9c-8cd7-c2a1d52d3332 req-3ce5ec3c-661e-4ffb-9f67-e5027e5e0c75 service nova] Acquiring lock "refresh_cache-aa44a4ff-14e5-42d2-a082-06fe0ae9646c" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1499.830946] env[63379]: DEBUG oslo_concurrency.lockutils [req-98ec822e-af57-4d9c-8cd7-c2a1d52d3332 req-3ce5ec3c-661e-4ffb-9f67-e5027e5e0c75 service nova] Acquired lock "refresh_cache-aa44a4ff-14e5-42d2-a082-06fe0ae9646c" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1499.831275] env[63379]: DEBUG nova.network.neutron [req-98ec822e-af57-4d9c-8cd7-c2a1d52d3332 req-3ce5ec3c-661e-4ffb-9f67-e5027e5e0c75 service nova] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Refreshing network info cache for port da9aa440-961a-44c6-95bd-7e4d31987617 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1499.869262] env[63379]: DEBUG oslo_vmware.api [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Task: {'id': task-1779264, 'name': ReconfigVM_Task, 'duration_secs': 0.457898} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.869568] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Reconfigured VM instance instance-00000024 to attach disk [datastore1] volume-084d5362-d8e9-4034-9623-555ed06a1add/volume-084d5362-d8e9-4034-9623-555ed06a1add.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1499.874433] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0608cd90-9b20-4b56-a15e-86ce3a896cef {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.887670] env[63379]: DEBUG nova.compute.manager [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1499.890515] env[63379]: DEBUG oslo_vmware.api [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Waiting for the task: (returnval){ [ 1499.890515] env[63379]: value = "task-1779268" [ 1499.890515] env[63379]: _type = "Task" [ 1499.890515] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.898408] env[63379]: DEBUG oslo_vmware.api [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Task: {'id': task-1779268, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.911800] env[63379]: DEBUG nova.virt.hardware [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1499.912021] env[63379]: DEBUG nova.virt.hardware [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1499.912196] env[63379]: DEBUG nova.virt.hardware [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1499.912384] env[63379]: DEBUG nova.virt.hardware [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1499.912533] env[63379]: DEBUG nova.virt.hardware [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1499.912708] env[63379]: DEBUG nova.virt.hardware [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Chose sockets=0, cores=0, threads=0; limits were 
sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1499.912929] env[63379]: DEBUG nova.virt.hardware [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1499.913108] env[63379]: DEBUG nova.virt.hardware [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1499.913284] env[63379]: DEBUG nova.virt.hardware [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1499.913449] env[63379]: DEBUG nova.virt.hardware [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1499.913627] env[63379]: DEBUG nova.virt.hardware [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1499.914515] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6967fa2-7990-4ccc-bd99-ac05f08ea00a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.922817] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-713f9abf-235f-4e31-82fb-901688ef9822 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.928931] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5f802eb7-212c-4a84-a176-3caa01b611b4 tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Releasing lock "refresh_cache-a78feafb-00bc-44c4-acd3-a36fb8a81767" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1499.931104] env[63379]: DEBUG nova.compute.manager [None req-5f802eb7-212c-4a84-a176-3caa01b611b4 tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1499.931874] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d423dec1-04b0-451c-b41c-54592afc4781 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.090366] env[63379]: DEBUG 
nova.scheduler.client.report [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1500.157996] env[63379]: DEBUG oslo_vmware.api [None req-df8ea99e-33f0-4daa-b1e2-1115c27c15ba tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Task: {'id': task-1779265, 'name': PowerOffVM_Task, 'duration_secs': 0.227204} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1500.158306] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-df8ea99e-33f0-4daa-b1e2-1115c27c15ba tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1500.158484] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-df8ea99e-33f0-4daa-b1e2-1115c27c15ba tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1500.158756] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9ae5152b-324f-4085-976e-abde8e703633 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.191698] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1500.244417] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779266, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.275173] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-df8ea99e-33f0-4daa-b1e2-1115c27c15ba tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1500.275456] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-df8ea99e-33f0-4daa-b1e2-1115c27c15ba tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1500.275757] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-df8ea99e-33f0-4daa-b1e2-1115c27c15ba tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Deleting the datastore file [datastore1] 07cc8cd7-8368-41dd-ae13-01c8275cac9e {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1500.276146] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ad23c2ba-09e7-4b2f-9959-dc22d1c017dc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.282402] env[63379]: DEBUG oslo_vmware.api [None req-df8ea99e-33f0-4daa-b1e2-1115c27c15ba tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Waiting for the task: (returnval){ [ 1500.282402] env[63379]: value = "task-1779270" [ 1500.282402] env[63379]: _type = "Task" [ 1500.282402] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1500.295210] env[63379]: DEBUG oslo_vmware.api [None req-df8ea99e-33f0-4daa-b1e2-1115c27c15ba tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Task: {'id': task-1779270, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.306268] env[63379]: DEBUG oslo_vmware.api [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779267, 'name': Rename_Task, 'duration_secs': 0.177977} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1500.306577] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1500.306819] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2dcea352-3830-496b-a315-c1b5d433bd56 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.315293] env[63379]: DEBUG oslo_vmware.api [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Waiting for the task: (returnval){ [ 1500.315293] env[63379]: value = "task-1779271" [ 1500.315293] env[63379]: _type = "Task" [ 1500.315293] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1500.325021] env[63379]: DEBUG nova.compute.manager [req-46bf523c-6896-4368-8b10-43df20bbf1c2 req-c4f22058-1e06-4a1f-939c-f0bb452ac9fc service nova] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Received event network-vif-plugged-d994b910-f078-4d71-a9e5-f3177a54dfef {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1500.325021] env[63379]: DEBUG oslo_concurrency.lockutils [req-46bf523c-6896-4368-8b10-43df20bbf1c2 req-c4f22058-1e06-4a1f-939c-f0bb452ac9fc service nova] Acquiring lock "aedff32b-b0c2-4a93-a2c6-349d26839cc4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1500.325021] env[63379]: DEBUG oslo_concurrency.lockutils [req-46bf523c-6896-4368-8b10-43df20bbf1c2 req-c4f22058-1e06-4a1f-939c-f0bb452ac9fc service nova] Lock "aedff32b-b0c2-4a93-a2c6-349d26839cc4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1500.325021] env[63379]: DEBUG oslo_concurrency.lockutils [req-46bf523c-6896-4368-8b10-43df20bbf1c2 req-c4f22058-1e06-4a1f-939c-f0bb452ac9fc service nova] Lock "aedff32b-b0c2-4a93-a2c6-349d26839cc4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1500.325021] env[63379]: DEBUG nova.compute.manager [req-46bf523c-6896-4368-8b10-43df20bbf1c2 req-c4f22058-1e06-4a1f-939c-f0bb452ac9fc service nova] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] No waiting events found dispatching network-vif-plugged-d994b910-f078-4d71-a9e5-f3177a54dfef {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1500.325021] env[63379]: WARNING nova.compute.manager [req-46bf523c-6896-4368-8b10-43df20bbf1c2 req-c4f22058-1e06-4a1f-939c-f0bb452ac9fc service nova] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Received unexpected event network-vif-plugged-d994b910-f078-4d71-a9e5-f3177a54dfef for instance with vm_state building and task_state spawning. 
[ 1500.326797] env[63379]: DEBUG oslo_vmware.api [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779271, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.403848] env[63379]: DEBUG oslo_vmware.api [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Task: {'id': task-1779268, 'name': ReconfigVM_Task, 'duration_secs': 0.124532} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1500.403848] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369286', 'volume_id': '084d5362-d8e9-4034-9623-555ed06a1add', 'name': 'volume-084d5362-d8e9-4034-9623-555ed06a1add', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6', 'attached_at': '', 'detached_at': '', 'volume_id': '084d5362-d8e9-4034-9623-555ed06a1add', 'serial': '084d5362-d8e9-4034-9623-555ed06a1add'} {{(pid=63379) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1500.403848] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-92e8f14e-24e8-472e-ba13-9660bfd491ee {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.410515] env[63379]: DEBUG oslo_vmware.api [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Waiting for the task: (returnval){ [ 1500.410515] env[63379]: value = "task-1779272" [ 1500.410515] env[63379]: _type = "Task" [ 1500.410515] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1500.419664] env[63379]: DEBUG oslo_vmware.api [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Task: {'id': task-1779272, 'name': Rename_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.459315] env[63379]: DEBUG nova.network.neutron [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Successfully updated port: d994b910-f078-4d71-a9e5-f3177a54dfef {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1500.572822] env[63379]: DEBUG nova.network.neutron [req-98ec822e-af57-4d9c-8cd7-c2a1d52d3332 req-3ce5ec3c-661e-4ffb-9f67-e5027e5e0c75 service nova] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Updated VIF entry in instance network info cache for port da9aa440-961a-44c6-95bd-7e4d31987617. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1500.573204] env[63379]: DEBUG nova.network.neutron [req-98ec822e-af57-4d9c-8cd7-c2a1d52d3332 req-3ce5ec3c-661e-4ffb-9f67-e5027e5e0c75 service nova] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Updating instance_info_cache with network_info: [{"id": "da9aa440-961a-44c6-95bd-7e4d31987617", "address": "fa:16:3e:d1:9c:1c", "network": {"id": "55f3848c-4d4f-4c83-a3e6-bc7a6f7af3ce", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.90", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eb95d75934bc4912a35f709406a98a65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda9aa440-96", "ovs_interfaceid": "da9aa440-961a-44c6-95bd-7e4d31987617", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1500.595059] env[63379]: DEBUG oslo_concurrency.lockutils [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.739s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1500.595567] env[63379]: DEBUG nova.compute.manager [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1500.597988] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 24.123s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1500.598186] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1500.598339] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63379) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1500.598657] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c65b6d5e-cf04-4285-af70-fc58a5cdd4a5 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.992s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1500.598828] env[63379]: DEBUG nova.objects.instance [None req-c65b6d5e-cf04-4285-af70-fc58a5cdd4a5 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Lazy-loading 'resources' on Instance uuid 76731b1b-af66-441b-8fe4-d5d7e7faf3ca {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1500.600999] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d28bbdd-c7aa-49e5-b1d1-a44f39ea4c7e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.609366] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8ae12b3-7e39-4d72-9cd6-898a9b2f16ae {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.627961] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9efd4f0e-1467-4b3f-95d4-a30f0c8277c5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.634667] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a435547c-9b55-4900-a14d-13adc5c6d8b7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.664217] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180242MB free_disk=163GB free_vcpus=48 pci_devices=None {{(pid=63379) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1500.664417] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1500.745567] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779266, 'name': CreateVM_Task, 'duration_secs': 0.58294} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1500.745863] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1500.746524] env[63379]: DEBUG oslo_concurrency.lockutils [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1500.746701] env[63379]: DEBUG oslo_concurrency.lockutils [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1500.747042] env[63379]: DEBUG oslo_concurrency.lockutils [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1500.747322] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a00737c-4a3c-4367-920b-38820ab6800b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.752338] env[63379]: DEBUG oslo_vmware.api [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Waiting for the task: (returnval){ [ 1500.752338] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52facfd3-6736-506a-e57c-3b7c730d20fe" [ 1500.752338] env[63379]: _type = "Task" [ 1500.752338] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1500.759662] env[63379]: DEBUG oslo_vmware.api [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52facfd3-6736-506a-e57c-3b7c730d20fe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.790289] env[63379]: DEBUG oslo_vmware.api [None req-df8ea99e-33f0-4daa-b1e2-1115c27c15ba tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Task: {'id': task-1779270, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.364786} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1500.790574] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-df8ea99e-33f0-4daa-b1e2-1115c27c15ba tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1500.790734] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-df8ea99e-33f0-4daa-b1e2-1115c27c15ba tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1500.790945] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-df8ea99e-33f0-4daa-b1e2-1115c27c15ba tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1500.791152] env[63379]: INFO nova.compute.manager [None req-df8ea99e-33f0-4daa-b1e2-1115c27c15ba tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1500.791391] env[63379]: DEBUG oslo.service.loopingcall [None req-df8ea99e-33f0-4daa-b1e2-1115c27c15ba tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1500.791575] env[63379]: DEBUG nova.compute.manager [-] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1500.791669] env[63379]: DEBUG nova.network.neutron [-] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1500.824737] env[63379]: DEBUG oslo_vmware.api [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779271, 'name': PowerOnVM_Task} progress is 78%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.920478] env[63379]: DEBUG oslo_vmware.api [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Task: {'id': task-1779272, 'name': Rename_Task, 'duration_secs': 0.146106} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1500.920760] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1500.921008] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e7ba2ea8-65a7-4b21-bed2-dd46d9a3c2a0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.927458] env[63379]: DEBUG oslo_vmware.api [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Waiting for the task: (returnval){ [ 1500.927458] env[63379]: value = "task-1779273" [ 1500.927458] env[63379]: _type = "Task" [ 1500.927458] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1500.935413] env[63379]: DEBUG oslo_vmware.api [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Task: {'id': task-1779273, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.961181] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b1df89b-ad1e-49da-ada0-c068d11cc785 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.966093] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Acquiring lock "refresh_cache-aedff32b-b0c2-4a93-a2c6-349d26839cc4" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1500.966313] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Acquired lock "refresh_cache-aedff32b-b0c2-4a93-a2c6-349d26839cc4" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1500.966626] env[63379]: DEBUG nova.network.neutron [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1500.973283] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-5f802eb7-212c-4a84-a176-3caa01b611b4 tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Doing hard reboot of VM {{(pid=63379) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1500.973283] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-1e4c641c-6a78-4ad5-a8b8-d5de3606d9a4 {{(pid=63379) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.978054] env[63379]: DEBUG oslo_vmware.api [None req-5f802eb7-212c-4a84-a176-3caa01b611b4 tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Waiting for the task: (returnval){ [ 1500.978054] env[63379]: value = "task-1779274" [ 1500.978054] env[63379]: _type = "Task" [ 1500.978054] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1500.988592] env[63379]: DEBUG oslo_vmware.api [None req-5f802eb7-212c-4a84-a176-3caa01b611b4 tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Task: {'id': task-1779274, 'name': ResetVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.075947] env[63379]: DEBUG oslo_concurrency.lockutils [req-98ec822e-af57-4d9c-8cd7-c2a1d52d3332 req-3ce5ec3c-661e-4ffb-9f67-e5027e5e0c75 service nova] Releasing lock "refresh_cache-aa44a4ff-14e5-42d2-a082-06fe0ae9646c" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1501.102160] env[63379]: DEBUG nova.compute.utils [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1501.103813] env[63379]: DEBUG nova.compute.manager [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1501.104021] env[63379]: DEBUG nova.network.neutron [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1501.158393] env[63379]: DEBUG nova.policy [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4a09fcc05b7d4239bcd13389bb41ebf4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f6552f9956224ba5a0a01328da741242', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1501.268857] env[63379]: DEBUG oslo_vmware.api [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52facfd3-6736-506a-e57c-3b7c730d20fe, 'name': SearchDatastore_Task, 'duration_secs': 0.01717} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1501.270237] env[63379]: DEBUG oslo_concurrency.lockutils [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1501.270237] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1501.270237] env[63379]: DEBUG oslo_concurrency.lockutils [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1501.270237] env[63379]: DEBUG oslo_concurrency.lockutils [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1501.270237] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1501.270237] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-032d3b8b-14c6-47dc-8d7c-72703ac80d64 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.281679] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1501.282037] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1501.282671] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ebc139a-7f4d-4f69-ad93-cfa982354f3b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.291422] env[63379]: DEBUG oslo_vmware.api [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Waiting for the task: (returnval){ [ 1501.291422] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]521636b2-f15a-27a4-a733-44aef29fbf00" [ 1501.291422] env[63379]: _type = "Task" [ 1501.291422] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1501.299621] env[63379]: DEBUG oslo_vmware.api [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]521636b2-f15a-27a4-a733-44aef29fbf00, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.325979] env[63379]: DEBUG oslo_vmware.api [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779271, 'name': PowerOnVM_Task, 'duration_secs': 1.001327} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1501.328614] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1501.328856] env[63379]: INFO nova.compute.manager [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Took 9.72 seconds to spawn the instance on the hypervisor. [ 1501.329048] env[63379]: DEBUG nova.compute.manager [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1501.330052] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2c62c87-2563-4cac-bbf7-61802afeac23 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.442112] env[63379]: DEBUG oslo_vmware.api [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Task: {'id': task-1779273, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.486814] env[63379]: DEBUG oslo_vmware.api [None req-5f802eb7-212c-4a84-a176-3caa01b611b4 tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Task: {'id': task-1779274, 'name': ResetVM_Task, 'duration_secs': 0.083465} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1501.487137] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-5f802eb7-212c-4a84-a176-3caa01b611b4 tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Did hard reboot of VM {{(pid=63379) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1501.487340] env[63379]: DEBUG nova.compute.manager [None req-5f802eb7-212c-4a84-a176-3caa01b611b4 tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1501.488283] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-247a7c29-2579-4a74-9c56-a6cbe11af9c8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.499700] env[63379]: DEBUG nova.network.neutron [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1501.527999] env[63379]: DEBUG nova.network.neutron [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Successfully created port: 021a6cdc-585b-40dc-a330-d328102cf80c {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1501.609683] env[63379]: DEBUG nova.compute.manager [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Start building block device mappings for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1501.633087] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-334c2307-f791-450d-a337-23313e76c8ff {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.646024] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b7773a2-0ef6-4f32-8a03-cfa7a62e8e0a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.683656] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40070b49-6af4-41dd-b0c7-1fc5c683f0f4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.692073] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-289f15ba-98a6-4867-9a96-6c38a1ec8781 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.708379] env[63379]: DEBUG nova.compute.provider_tree [None req-c65b6d5e-cf04-4285-af70-fc58a5cdd4a5 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1501.744800] env[63379]: DEBUG nova.network.neutron [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Updating instance_info_cache with network_info: [{"id": "d994b910-f078-4d71-a9e5-f3177a54dfef", "address": "fa:16:3e:45:dc:63", "network": {"id": "6f6e9d87-2ff1-4f28-8e25-27bb57119d5d", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-455291752-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7fadf3c8628840efb6c8f6f99df21694", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c68b7663-4f0e-47f0-ac7f-40c6d952f7bb", "external-id": "nsx-vlan-transportzone-696", "segmentation_id": 696, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd994b910-f0", "ovs_interfaceid": "d994b910-f078-4d71-a9e5-f3177a54dfef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1501.764297] env[63379]: DEBUG nova.network.neutron [-] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1501.802244] env[63379]: DEBUG oslo_vmware.api [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 
tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]521636b2-f15a-27a4-a733-44aef29fbf00, 'name': SearchDatastore_Task, 'duration_secs': 0.011431} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1501.803115] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13324aa9-fbcc-481f-b833-b135b7bb7bae {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.808603] env[63379]: DEBUG oslo_vmware.api [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Waiting for the task: (returnval){ [ 1501.808603] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52028808-a1f8-b51e-96d1-0a600e1a9ddc" [ 1501.808603] env[63379]: _type = "Task" [ 1501.808603] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1501.816354] env[63379]: DEBUG oslo_vmware.api [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52028808-a1f8-b51e-96d1-0a600e1a9ddc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.847424] env[63379]: INFO nova.compute.manager [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Took 55.33 seconds to build instance. [ 1501.860573] env[63379]: DEBUG nova.compute.manager [req-b638f6ee-77d6-49d1-82ab-d1f85b1c1919 req-cf871a02-d918-4f15-979a-71617271acba service nova] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Received event network-vif-deleted-85bd2ccd-417b-4f6c-9e65-c41d8adb52d2 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1501.860792] env[63379]: DEBUG nova.compute.manager [req-b638f6ee-77d6-49d1-82ab-d1f85b1c1919 req-cf871a02-d918-4f15-979a-71617271acba service nova] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Received event network-vif-deleted-0364d0f7-f24e-4ee3-aead-cb4a79933b69 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1501.937263] env[63379]: DEBUG oslo_vmware.api [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Task: {'id': task-1779273, 'name': PowerOnVM_Task, 'duration_secs': 0.538475} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1501.937488] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1501.937692] env[63379]: INFO nova.compute.manager [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Took 5.62 seconds to spawn the instance on the hypervisor. [ 1501.937873] env[63379]: DEBUG nova.compute.manager [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1501.938631] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5563e19d-0d5c-4239-9a92-aa99a361b8f5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.006928] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5f802eb7-212c-4a84-a176-3caa01b611b4 tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Lock "a78feafb-00bc-44c4-acd3-a36fb8a81767" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.005s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1502.211490] env[63379]: DEBUG nova.scheduler.client.report [None req-c65b6d5e-cf04-4285-af70-fc58a5cdd4a5 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1502.247892] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Releasing lock "refresh_cache-aedff32b-b0c2-4a93-a2c6-349d26839cc4" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1502.248132] env[63379]: DEBUG nova.compute.manager [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Instance network_info: |[{"id": "d994b910-f078-4d71-a9e5-f3177a54dfef", "address": "fa:16:3e:45:dc:63", "network": {"id": "6f6e9d87-2ff1-4f28-8e25-27bb57119d5d", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-455291752-network", "subnets": [{"cidr": 
"192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7fadf3c8628840efb6c8f6f99df21694", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c68b7663-4f0e-47f0-ac7f-40c6d952f7bb", "external-id": "nsx-vlan-transportzone-696", "segmentation_id": 696, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd994b910-f0", "ovs_interfaceid": "d994b910-f078-4d71-a9e5-f3177a54dfef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1502.248567] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:45:dc:63', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c68b7663-4f0e-47f0-ac7f-40c6d952f7bb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd994b910-f078-4d71-a9e5-f3177a54dfef', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1502.256505] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Creating folder: Project (7fadf3c8628840efb6c8f6f99df21694). Parent ref: group-v369214. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1502.256803] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-78b80b01-a1cd-45cd-8ef9-ba4854a6ad35 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.266881] env[63379]: INFO nova.compute.manager [-] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Took 1.48 seconds to deallocate network for instance. [ 1502.269190] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Created folder: Project (7fadf3c8628840efb6c8f6f99df21694) in parent group-v369214. [ 1502.269469] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Creating folder: Instances. Parent ref: group-v369328. 
{{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1502.272259] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b6cc07a8-7ccc-4318-a6f3-9146f658187f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.281161] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Created folder: Instances in parent group-v369328. [ 1502.281523] env[63379]: DEBUG oslo.service.loopingcall [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1502.281798] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1502.282116] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-673a82f7-d330-496d-b467-bc00dac5bdf4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.314665] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1502.314665] env[63379]: value = "task-1779277" [ 1502.314665] env[63379]: _type = "Task" [ 1502.314665] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1502.324110] env[63379]: DEBUG oslo_vmware.api [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52028808-a1f8-b51e-96d1-0a600e1a9ddc, 'name': SearchDatastore_Task, 'duration_secs': 0.022306} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1502.324655] env[63379]: DEBUG oslo_concurrency.lockutils [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1502.324998] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] aa44a4ff-14e5-42d2-a082-06fe0ae9646c/aa44a4ff-14e5-42d2-a082-06fe0ae9646c.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1502.325221] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c743f9d1-38d2-49b9-bf64-877f8d7ef3c4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.331363] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779277, 'name': CreateVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.337041] env[63379]: DEBUG oslo_vmware.api [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Waiting for the task: (returnval){ [ 1502.337041] env[63379]: value = "task-1779278" [ 1502.337041] env[63379]: _type = "Task" [ 1502.337041] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1502.345211] env[63379]: DEBUG oslo_vmware.api [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779278, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.347612] env[63379]: DEBUG nova.compute.manager [req-a8a44d39-0354-4887-a998-625e96eaf98f req-22b585a4-2a46-452e-91cf-acf3deb07d61 service nova] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Received event network-changed-d994b910-f078-4d71-a9e5-f3177a54dfef {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1502.347612] env[63379]: DEBUG nova.compute.manager [req-a8a44d39-0354-4887-a998-625e96eaf98f req-22b585a4-2a46-452e-91cf-acf3deb07d61 service nova] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Refreshing instance network info cache due to event network-changed-d994b910-f078-4d71-a9e5-f3177a54dfef. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1502.347612] env[63379]: DEBUG oslo_concurrency.lockutils [req-a8a44d39-0354-4887-a998-625e96eaf98f req-22b585a4-2a46-452e-91cf-acf3deb07d61 service nova] Acquiring lock "refresh_cache-aedff32b-b0c2-4a93-a2c6-349d26839cc4" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1502.348012] env[63379]: DEBUG oslo_concurrency.lockutils [req-a8a44d39-0354-4887-a998-625e96eaf98f req-22b585a4-2a46-452e-91cf-acf3deb07d61 service nova] Acquired lock "refresh_cache-aedff32b-b0c2-4a93-a2c6-349d26839cc4" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1502.348012] env[63379]: DEBUG nova.network.neutron [req-a8a44d39-0354-4887-a998-625e96eaf98f req-22b585a4-2a46-452e-91cf-acf3deb07d61 service nova] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Refreshing network info cache for port d994b910-f078-4d71-a9e5-f3177a54dfef {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1502.349314] env[63379]: DEBUG oslo_concurrency.lockutils [None req-64af1974-6c02-4eae-9666-85b8ab493149 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Lock "ee36cc5f-61a1-4e4f-9cae-670f5868d90c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.797s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1502.454857] env[63379]: INFO nova.compute.manager [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Took 45.86 seconds to build instance. [ 1502.621404] env[63379]: DEBUG nova.compute.manager [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1502.652796] env[63379]: DEBUG nova.virt.hardware [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1502.653161] env[63379]: DEBUG nova.virt.hardware [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1502.653415] env[63379]: DEBUG nova.virt.hardware [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1502.653661] env[63379]: DEBUG nova.virt.hardware [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1502.653984] env[63379]: DEBUG nova.virt.hardware [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1502.654286] env[63379]: DEBUG nova.virt.hardware [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1502.654650] env[63379]: DEBUG nova.virt.hardware [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1502.654950] env[63379]: DEBUG nova.virt.hardware [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1502.655195] env[63379]: DEBUG nova.virt.hardware [None 
req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1502.655413] env[63379]: DEBUG nova.virt.hardware [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1502.655635] env[63379]: DEBUG nova.virt.hardware [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1502.656736] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c4a44a5-58c4-4b5b-b6e1-5b1687389067 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.666169] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9379c3ec-c933-4d2a-a8e1-835ab1a5cb1c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.719107] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c65b6d5e-cf04-4285-af70-fc58a5cdd4a5 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.118s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1502.720225] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.012s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1502.724839] env[63379]: INFO nova.compute.claims [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1502.750616] env[63379]: INFO nova.scheduler.client.report [None req-c65b6d5e-cf04-4285-af70-fc58a5cdd4a5 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Deleted allocations for instance 76731b1b-af66-441b-8fe4-d5d7e7faf3ca [ 1502.776264] env[63379]: DEBUG oslo_concurrency.lockutils [None req-df8ea99e-33f0-4daa-b1e2-1115c27c15ba tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1502.818718] env[63379]: DEBUG oslo_concurrency.lockutils [None req-093f700d-9e58-47b8-aaf3-6db145a6412b tempest-InstanceActionsTestJSON-1725331191 
tempest-InstanceActionsTestJSON-1725331191-project-member] Acquiring lock "a78feafb-00bc-44c4-acd3-a36fb8a81767" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1502.819095] env[63379]: DEBUG oslo_concurrency.lockutils [None req-093f700d-9e58-47b8-aaf3-6db145a6412b tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Lock "a78feafb-00bc-44c4-acd3-a36fb8a81767" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1502.819323] env[63379]: DEBUG oslo_concurrency.lockutils [None req-093f700d-9e58-47b8-aaf3-6db145a6412b tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Acquiring lock "a78feafb-00bc-44c4-acd3-a36fb8a81767-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1502.819510] env[63379]: DEBUG oslo_concurrency.lockutils [None req-093f700d-9e58-47b8-aaf3-6db145a6412b tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Lock "a78feafb-00bc-44c4-acd3-a36fb8a81767-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1502.819724] env[63379]: DEBUG oslo_concurrency.lockutils [None req-093f700d-9e58-47b8-aaf3-6db145a6412b tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Lock "a78feafb-00bc-44c4-acd3-a36fb8a81767-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1502.822214] env[63379]: INFO nova.compute.manager [None req-093f700d-9e58-47b8-aaf3-6db145a6412b tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Terminating instance [ 1502.828561] env[63379]: DEBUG nova.compute.manager [None req-093f700d-9e58-47b8-aaf3-6db145a6412b tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1502.828810] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-093f700d-9e58-47b8-aaf3-6db145a6412b tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1502.829830] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc34e0ec-587c-4ff9-be32-b3b520f8e9cb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.841248] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779277, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.847374] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-093f700d-9e58-47b8-aaf3-6db145a6412b tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1502.847599] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-626f7b1a-87e9-4aae-813c-1946a9b8c8bb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.853365] env[63379]: DEBUG nova.compute.manager [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1502.861308] env[63379]: DEBUG oslo_vmware.api [None req-093f700d-9e58-47b8-aaf3-6db145a6412b tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Waiting for the task: (returnval){ [ 1502.861308] env[63379]: value = "task-1779279" [ 1502.861308] env[63379]: _type = "Task" [ 1502.861308] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1502.862502] env[63379]: DEBUG oslo_vmware.api [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779278, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.876224] env[63379]: DEBUG oslo_vmware.api [None req-093f700d-9e58-47b8-aaf3-6db145a6412b tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Task: {'id': task-1779279, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.957741] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b5b97046-42d2-4bc6-985d-b7c9aabf53e3 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Lock "a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 62.405s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1503.177221] env[63379]: DEBUG nova.network.neutron [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Successfully updated port: 021a6cdc-585b-40dc-a330-d328102cf80c {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1503.259054] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c65b6d5e-cf04-4285-af70-fc58a5cdd4a5 tempest-ImagesOneServerTestJSON-441256758 tempest-ImagesOneServerTestJSON-441256758-project-member] Lock "76731b1b-af66-441b-8fe4-d5d7e7faf3ca" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.875s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1503.328118] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779277, 'name': CreateVM_Task, 'duration_secs': 0.704413} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.328294] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1503.328942] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1503.329170] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1503.329447] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1503.329697] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f37b3b3e-f297-4ff4-832e-b562d2b17c2f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.334139] env[63379]: DEBUG oslo_vmware.api [None req-6b070053-23d1-4429-8e04-d9be4876620d 
tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Waiting for the task: (returnval){ [ 1503.334139] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d4a5ae-c711-9a8b-4e52-f37e83d9584d" [ 1503.334139] env[63379]: _type = "Task" [ 1503.334139] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.341817] env[63379]: DEBUG oslo_vmware.api [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d4a5ae-c711-9a8b-4e52-f37e83d9584d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.348903] env[63379]: DEBUG oslo_vmware.api [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779278, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.610189} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.349159] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] aa44a4ff-14e5-42d2-a082-06fe0ae9646c/aa44a4ff-14e5-42d2-a082-06fe0ae9646c.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1503.349386] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1503.349620] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d03d699c-1d27-4efb-a57d-b1d5c275558d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.355150] env[63379]: DEBUG oslo_vmware.api [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Waiting for the task: (returnval){ [ 1503.355150] env[63379]: value = "task-1779280" [ 1503.355150] env[63379]: _type = "Task" [ 1503.355150] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.364478] env[63379]: DEBUG oslo_vmware.api [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779280, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.373152] env[63379]: DEBUG oslo_vmware.api [None req-093f700d-9e58-47b8-aaf3-6db145a6412b tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Task: {'id': task-1779279, 'name': PowerOffVM_Task, 'duration_secs': 0.28343} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.375531] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-093f700d-9e58-47b8-aaf3-6db145a6412b tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1503.375716] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-093f700d-9e58-47b8-aaf3-6db145a6412b tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1503.376433] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4f15971d-3200-4d87-bd5f-7f1bdad80fc5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.402521] env[63379]: DEBUG oslo_concurrency.lockutils [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1503.460082] env[63379]: DEBUG nova.compute.manager [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Starting instance... 
{{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1503.463882] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-093f700d-9e58-47b8-aaf3-6db145a6412b tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1503.463882] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-093f700d-9e58-47b8-aaf3-6db145a6412b tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1503.463882] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-093f700d-9e58-47b8-aaf3-6db145a6412b tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Deleting the datastore file [datastore1] a78feafb-00bc-44c4-acd3-a36fb8a81767 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1503.463882] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fc281168-647f-48fb-ab9e-6469c9adf503 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.474634] env[63379]: DEBUG oslo_vmware.api [None req-093f700d-9e58-47b8-aaf3-6db145a6412b tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Waiting for the task: (returnval){ [ 1503.474634] env[63379]: value = "task-1779282" [ 1503.474634] env[63379]: _type = "Task" [ 1503.474634] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.486378] env[63379]: DEBUG oslo_vmware.api [None req-093f700d-9e58-47b8-aaf3-6db145a6412b tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Task: {'id': task-1779282, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.505026] env[63379]: DEBUG nova.network.neutron [req-a8a44d39-0354-4887-a998-625e96eaf98f req-22b585a4-2a46-452e-91cf-acf3deb07d61 service nova] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Updated VIF entry in instance network info cache for port d994b910-f078-4d71-a9e5-f3177a54dfef. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1503.505425] env[63379]: DEBUG nova.network.neutron [req-a8a44d39-0354-4887-a998-625e96eaf98f req-22b585a4-2a46-452e-91cf-acf3deb07d61 service nova] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Updating instance_info_cache with network_info: [{"id": "d994b910-f078-4d71-a9e5-f3177a54dfef", "address": "fa:16:3e:45:dc:63", "network": {"id": "6f6e9d87-2ff1-4f28-8e25-27bb57119d5d", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-455291752-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7fadf3c8628840efb6c8f6f99df21694", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c68b7663-4f0e-47f0-ac7f-40c6d952f7bb", "external-id": "nsx-vlan-transportzone-696", "segmentation_id": 696, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd994b910-f0", "ovs_interfaceid": "d994b910-f078-4d71-a9e5-f3177a54dfef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1503.680592] env[63379]: DEBUG oslo_concurrency.lockutils [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquiring lock "refresh_cache-758ade2c-7f75-4907-95d5-681d5792ae31" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1503.680740] env[63379]: DEBUG oslo_concurrency.lockutils [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquired lock "refresh_cache-758ade2c-7f75-4907-95d5-681d5792ae31" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1503.680890] env[63379]: DEBUG nova.network.neutron [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1503.851338] env[63379]: DEBUG oslo_vmware.api [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d4a5ae-c711-9a8b-4e52-f37e83d9584d, 'name': SearchDatastore_Task, 'duration_secs': 0.03559} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.851913] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1503.852325] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1503.852701] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1503.852973] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1503.853292] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1503.853949] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-45707f16-eb60-4ad8-a7ad-953a9a77fb59 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.865694] env[63379]: DEBUG oslo_vmware.api [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779280, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083366} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.869352] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1503.869699] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1503.869875] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1503.871851] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0e8e5da-ce36-4658-8265-5fb5be5dcf66 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.874138] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e021e9a-fd06-4080-91e6-fa3b9a99a291 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.880253] env[63379]: DEBUG oslo_vmware.api [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Waiting for the task: (returnval){ [ 1503.880253] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52ef2374-2a5e-781a-432f-95f1aec0ed60" [ 1503.880253] env[63379]: _type = "Task" [ 1503.880253] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.903363] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Reconfiguring VM instance instance-00000025 to attach disk [datastore1] aa44a4ff-14e5-42d2-a082-06fe0ae9646c/aa44a4ff-14e5-42d2-a082-06fe0ae9646c.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1503.910472] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f6b81a3d-8b49-4a52-80c4-b3afe37a3cf8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.934798] env[63379]: DEBUG oslo_vmware.api [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52ef2374-2a5e-781a-432f-95f1aec0ed60, 'name': SearchDatastore_Task, 'duration_secs': 0.011724} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.937403] env[63379]: DEBUG oslo_vmware.api [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Waiting for the task: (returnval){ [ 1503.937403] env[63379]: value = "task-1779283" [ 1503.937403] env[63379]: _type = "Task" [ 1503.937403] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.937610] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f56fbb29-c846-41a9-820c-f71cb68065f4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.951541] env[63379]: DEBUG oslo_vmware.api [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Waiting for the task: (returnval){ [ 1503.951541] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52daa180-52a1-7c5f-f541-0e1f58bb4bb9" [ 1503.951541] env[63379]: _type = "Task" [ 1503.951541] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.952020] env[63379]: DEBUG oslo_vmware.api [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779283, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.967522] env[63379]: DEBUG oslo_vmware.api [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52daa180-52a1-7c5f-f541-0e1f58bb4bb9, 'name': SearchDatastore_Task, 'duration_secs': 0.012436} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.969814] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1503.970111] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] aedff32b-b0c2-4a93-a2c6-349d26839cc4/aedff32b-b0c2-4a93-a2c6-349d26839cc4.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1503.971410] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3e1ca64e-c1b1-4920-bbc0-ea8adf4fa2c6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.981014] env[63379]: DEBUG oslo_vmware.api [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Waiting for the task: (returnval){ [ 1503.981014] env[63379]: value = "task-1779284" [ 1503.981014] env[63379]: _type = "Task" [ 1503.981014] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.994978] env[63379]: DEBUG oslo_concurrency.lockutils [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1503.994978] env[63379]: DEBUG oslo_vmware.api [None req-093f700d-9e58-47b8-aaf3-6db145a6412b tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Task: {'id': task-1779282, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.451846} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.997722] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-093f700d-9e58-47b8-aaf3-6db145a6412b tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1503.997722] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-093f700d-9e58-47b8-aaf3-6db145a6412b tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1503.997722] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-093f700d-9e58-47b8-aaf3-6db145a6412b tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1503.997722] env[63379]: INFO nova.compute.manager [None req-093f700d-9e58-47b8-aaf3-6db145a6412b tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1503.997722] env[63379]: DEBUG oslo.service.loopingcall [None req-093f700d-9e58-47b8-aaf3-6db145a6412b tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1503.997722] env[63379]: DEBUG nova.compute.manager [-] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1503.997722] env[63379]: DEBUG nova.network.neutron [-] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1504.000998] env[63379]: DEBUG oslo_vmware.api [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Task: {'id': task-1779284, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.005853] env[63379]: DEBUG nova.compute.manager [req-2d45bb67-9eba-4107-91cd-385ad6a20cb4 req-a5223b9c-4ae1-41be-9977-d25bab94593d service nova] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Received event network-changed-9e56e6e1-9271-4b63-8a7f-5dbe0ed9cb1c {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1504.006081] env[63379]: DEBUG nova.compute.manager [req-2d45bb67-9eba-4107-91cd-385ad6a20cb4 req-a5223b9c-4ae1-41be-9977-d25bab94593d service nova] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Refreshing instance network info cache due to event network-changed-9e56e6e1-9271-4b63-8a7f-5dbe0ed9cb1c. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1504.006387] env[63379]: DEBUG oslo_concurrency.lockutils [req-2d45bb67-9eba-4107-91cd-385ad6a20cb4 req-a5223b9c-4ae1-41be-9977-d25bab94593d service nova] Acquiring lock "refresh_cache-a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1504.006439] env[63379]: DEBUG oslo_concurrency.lockutils [req-2d45bb67-9eba-4107-91cd-385ad6a20cb4 req-a5223b9c-4ae1-41be-9977-d25bab94593d service nova] Acquired lock "refresh_cache-a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1504.006589] env[63379]: DEBUG nova.network.neutron [req-2d45bb67-9eba-4107-91cd-385ad6a20cb4 req-a5223b9c-4ae1-41be-9977-d25bab94593d service nova] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Refreshing network info cache for port 9e56e6e1-9271-4b63-8a7f-5dbe0ed9cb1c {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1504.008601] env[63379]: DEBUG oslo_concurrency.lockutils [req-a8a44d39-0354-4887-a998-625e96eaf98f req-22b585a4-2a46-452e-91cf-acf3deb07d61 service nova] Releasing lock "refresh_cache-aedff32b-b0c2-4a93-a2c6-349d26839cc4" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1504.261094] env[63379]: DEBUG nova.network.neutron [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1504.371859] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-340dd2cc-6cff-474d-b566-b9089a31801b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.390253] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6e40d50-3dd4-4463-8429-e53cc64466d3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.395605] env[63379]: DEBUG nova.compute.manager [req-1eb2c3f1-2446-40fa-bc93-d67b4bd6c19f req-07046afe-8793-4cea-affb-64cce315bd41 service nova] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Received event network-vif-plugged-021a6cdc-585b-40dc-a330-d328102cf80c {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1504.395996] env[63379]: DEBUG oslo_concurrency.lockutils [req-1eb2c3f1-2446-40fa-bc93-d67b4bd6c19f req-07046afe-8793-4cea-affb-64cce315bd41 service nova] Acquiring lock "758ade2c-7f75-4907-95d5-681d5792ae31-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1504.396454] env[63379]: DEBUG oslo_concurrency.lockutils [req-1eb2c3f1-2446-40fa-bc93-d67b4bd6c19f req-07046afe-8793-4cea-affb-64cce315bd41 service nova] Lock "758ade2c-7f75-4907-95d5-681d5792ae31-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1504.396690] env[63379]: DEBUG 
oslo_concurrency.lockutils [req-1eb2c3f1-2446-40fa-bc93-d67b4bd6c19f req-07046afe-8793-4cea-affb-64cce315bd41 service nova] Lock "758ade2c-7f75-4907-95d5-681d5792ae31-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1504.396880] env[63379]: DEBUG nova.compute.manager [req-1eb2c3f1-2446-40fa-bc93-d67b4bd6c19f req-07046afe-8793-4cea-affb-64cce315bd41 service nova] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] No waiting events found dispatching network-vif-plugged-021a6cdc-585b-40dc-a330-d328102cf80c {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1504.397108] env[63379]: WARNING nova.compute.manager [req-1eb2c3f1-2446-40fa-bc93-d67b4bd6c19f req-07046afe-8793-4cea-affb-64cce315bd41 service nova] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Received unexpected event network-vif-plugged-021a6cdc-585b-40dc-a330-d328102cf80c for instance with vm_state building and task_state spawning. [ 1504.397285] env[63379]: DEBUG nova.compute.manager [req-1eb2c3f1-2446-40fa-bc93-d67b4bd6c19f req-07046afe-8793-4cea-affb-64cce315bd41 service nova] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Received event network-changed-021a6cdc-585b-40dc-a330-d328102cf80c {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1504.397469] env[63379]: DEBUG nova.compute.manager [req-1eb2c3f1-2446-40fa-bc93-d67b4bd6c19f req-07046afe-8793-4cea-affb-64cce315bd41 service nova] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Refreshing instance network info cache due to event network-changed-021a6cdc-585b-40dc-a330-d328102cf80c. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1504.397898] env[63379]: DEBUG oslo_concurrency.lockutils [req-1eb2c3f1-2446-40fa-bc93-d67b4bd6c19f req-07046afe-8793-4cea-affb-64cce315bd41 service nova] Acquiring lock "refresh_cache-758ade2c-7f75-4907-95d5-681d5792ae31" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1504.437910] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aea11558-8ef7-4534-a400-0170f9ae6e4b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.456448] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb2b3202-a651-4878-9e1e-0ae3070d795b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.461268] env[63379]: DEBUG oslo_vmware.api [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779283, 'name': ReconfigVM_Task, 'duration_secs': 0.395303} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1504.461588] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Reconfigured VM instance instance-00000025 to attach disk [datastore1] aa44a4ff-14e5-42d2-a082-06fe0ae9646c/aa44a4ff-14e5-42d2-a082-06fe0ae9646c.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1504.463269] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d443eb2b-12b0-433c-bdc3-ca983ccc0495 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.474409] env[63379]: DEBUG nova.compute.provider_tree [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1504.487058] env[63379]: DEBUG oslo_vmware.api [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Waiting for the task: (returnval){ [ 1504.487058] env[63379]: value = "task-1779285" [ 1504.487058] env[63379]: _type = "Task" [ 1504.487058] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1504.495223] env[63379]: DEBUG oslo_vmware.api [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Task: {'id': task-1779284, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.501394] env[63379]: DEBUG oslo_vmware.api [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779285, 'name': Rename_Task} progress is 10%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.584216] env[63379]: DEBUG nova.network.neutron [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Updating instance_info_cache with network_info: [{"id": "021a6cdc-585b-40dc-a330-d328102cf80c", "address": "fa:16:3e:bf:be:c8", "network": {"id": "37610d35-b1d3-4657-9542-9e3e955af5be", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-58653119-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6552f9956224ba5a0a01328da741242", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap021a6cdc-58", "ovs_interfaceid": "021a6cdc-585b-40dc-a330-d328102cf80c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1504.788801] env[63379]: DEBUG nova.network.neutron [req-2d45bb67-9eba-4107-91cd-385ad6a20cb4 req-a5223b9c-4ae1-41be-9977-d25bab94593d service nova] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Updated VIF entry in instance network info cache for port 9e56e6e1-9271-4b63-8a7f-5dbe0ed9cb1c. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1504.789268] env[63379]: DEBUG nova.network.neutron [req-2d45bb67-9eba-4107-91cd-385ad6a20cb4 req-a5223b9c-4ae1-41be-9977-d25bab94593d service nova] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Updating instance_info_cache with network_info: [{"id": "9e56e6e1-9271-4b63-8a7f-5dbe0ed9cb1c", "address": "fa:16:3e:78:f3:2f", "network": {"id": "e4e5aea3-500f-45e8-b507-11a67a599a0a", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-2078307430-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.254", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53f21d581df140adb2012ea248c39a1f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e56e6e1-92", "ovs_interfaceid": "9e56e6e1-9271-4b63-8a7f-5dbe0ed9cb1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1504.824932] env[63379]: DEBUG nova.network.neutron [-] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1504.977971] env[63379]: DEBUG nova.scheduler.client.report [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1505.002611] env[63379]: DEBUG oslo_vmware.api [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Task: {'id': task-1779284, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.588869} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1505.003016] env[63379]: DEBUG oslo_vmware.api [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779285, 'name': Rename_Task, 'duration_secs': 0.198425} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1505.003071] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] aedff32b-b0c2-4a93-a2c6-349d26839cc4/aedff32b-b0c2-4a93-a2c6-349d26839cc4.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1505.004217] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1505.004217] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1505.004440] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-177d0cc7-e60f-4623-b7c5-270af31705da {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.006842] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cdaf7393-de28-4948-8b63-d850233dac9e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.013869] env[63379]: DEBUG oslo_vmware.api [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Waiting for the task: (returnval){ [ 1505.013869] env[63379]: value = "task-1779286" [ 1505.013869] env[63379]: _type = "Task" [ 1505.013869] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1505.015221] env[63379]: DEBUG oslo_vmware.api [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Waiting for the task: (returnval){ [ 1505.015221] env[63379]: value = "task-1779287" [ 1505.015221] env[63379]: _type = "Task" [ 1505.015221] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1505.029033] env[63379]: DEBUG oslo_vmware.api [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Task: {'id': task-1779286, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.029692] env[63379]: DEBUG oslo_vmware.api [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779287, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.090630] env[63379]: DEBUG oslo_concurrency.lockutils [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Releasing lock "refresh_cache-758ade2c-7f75-4907-95d5-681d5792ae31" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1505.091010] env[63379]: DEBUG nova.compute.manager [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Instance network_info: |[{"id": "021a6cdc-585b-40dc-a330-d328102cf80c", "address": "fa:16:3e:bf:be:c8", "network": {"id": "37610d35-b1d3-4657-9542-9e3e955af5be", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-58653119-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6552f9956224ba5a0a01328da741242", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap021a6cdc-58", "ovs_interfaceid": "021a6cdc-585b-40dc-a330-d328102cf80c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1505.091433] env[63379]: DEBUG oslo_concurrency.lockutils [req-1eb2c3f1-2446-40fa-bc93-d67b4bd6c19f req-07046afe-8793-4cea-affb-64cce315bd41 service nova] Acquired lock "refresh_cache-758ade2c-7f75-4907-95d5-681d5792ae31" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1505.091632] env[63379]: DEBUG nova.network.neutron [req-1eb2c3f1-2446-40fa-bc93-d67b4bd6c19f req-07046afe-8793-4cea-affb-64cce315bd41 service nova] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Refreshing network info cache for port 021a6cdc-585b-40dc-a330-d328102cf80c {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1505.093082] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bf:be:c8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e445fb59-822c-4d7d-943b-c8e3bbaca62e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '021a6cdc-585b-40dc-a330-d328102cf80c', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1505.103738] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] 
Creating folder: Project (f6552f9956224ba5a0a01328da741242). Parent ref: group-v369214. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1505.103738] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-82853d7a-0dbb-4213-9533-714be1478924 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.115173] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Created folder: Project (f6552f9956224ba5a0a01328da741242) in parent group-v369214. [ 1505.115547] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Creating folder: Instances. Parent ref: group-v369331. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1505.115811] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-18a7ba43-6a61-4ffa-9c5a-131a9aa8f559 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.125279] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Created folder: Instances in parent group-v369331. [ 1505.125526] env[63379]: DEBUG oslo.service.loopingcall [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1505.125721] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1505.125933] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b27fd774-ecbb-4882-affd-f609f69169a6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.145258] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1505.145258] env[63379]: value = "task-1779290" [ 1505.145258] env[63379]: _type = "Task" [ 1505.145258] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1505.155433] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779290, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.292899] env[63379]: DEBUG oslo_concurrency.lockutils [req-2d45bb67-9eba-4107-91cd-385ad6a20cb4 req-a5223b9c-4ae1-41be-9977-d25bab94593d service nova] Releasing lock "refresh_cache-a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1505.326614] env[63379]: INFO nova.compute.manager [-] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Took 1.33 seconds to deallocate network for instance. 
[ 1505.485566] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.765s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1505.486338] env[63379]: DEBUG nova.compute.manager [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1505.489774] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e047eba5-5831-4ef9-975e-1af10c587cf4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.149s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1505.491556] env[63379]: DEBUG nova.objects.instance [None req-e047eba5-5831-4ef9-975e-1af10c587cf4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lazy-loading 'resources' on Instance uuid 48c0d20e-adc4-40a9-888c-ffea363f6edb {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1505.529730] env[63379]: DEBUG oslo_vmware.api [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Task: {'id': task-1779286, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.180585} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1505.534869] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1505.537989] env[63379]: DEBUG oslo_vmware.api [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779287, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.538795] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d90b8131-3d92-4e42-841d-ec4f0b64e5b5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.562047] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Reconfiguring VM instance instance-00000026 to attach disk [datastore1] aedff32b-b0c2-4a93-a2c6-349d26839cc4/aedff32b-b0c2-4a93-a2c6-349d26839cc4.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1505.562948] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e94aa5d9-f9d7-4de3-a97b-aff3a44f14e3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.583689] env[63379]: DEBUG oslo_vmware.api [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Waiting for the task: (returnval){ [ 1505.583689] env[63379]: value = "task-1779291" [ 1505.583689] env[63379]: _type = "Task" [ 1505.583689] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1505.594862] env[63379]: DEBUG oslo_vmware.api [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Task: {'id': task-1779291, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.656709] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779290, 'name': CreateVM_Task, 'duration_secs': 0.414609} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1505.656910] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1505.657680] env[63379]: DEBUG oslo_concurrency.lockutils [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1505.657849] env[63379]: DEBUG oslo_concurrency.lockutils [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1505.658256] env[63379]: DEBUG oslo_concurrency.lockutils [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1505.658528] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e8e3af5-a365-4c0e-b664-618134140541 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.663609] env[63379]: DEBUG oslo_vmware.api [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1505.663609] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e8b479-d553-4f0d-d72c-f98ddb8c2c0b" [ 1505.663609] env[63379]: _type = "Task" [ 1505.663609] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1505.674564] env[63379]: DEBUG oslo_vmware.api [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e8b479-d553-4f0d-d72c-f98ddb8c2c0b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.823540] env[63379]: DEBUG nova.network.neutron [req-1eb2c3f1-2446-40fa-bc93-d67b4bd6c19f req-07046afe-8793-4cea-affb-64cce315bd41 service nova] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Updated VIF entry in instance network info cache for port 021a6cdc-585b-40dc-a330-d328102cf80c. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1505.823942] env[63379]: DEBUG nova.network.neutron [req-1eb2c3f1-2446-40fa-bc93-d67b4bd6c19f req-07046afe-8793-4cea-affb-64cce315bd41 service nova] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Updating instance_info_cache with network_info: [{"id": "021a6cdc-585b-40dc-a330-d328102cf80c", "address": "fa:16:3e:bf:be:c8", "network": {"id": "37610d35-b1d3-4657-9542-9e3e955af5be", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-58653119-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6552f9956224ba5a0a01328da741242", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap021a6cdc-58", "ovs_interfaceid": "021a6cdc-585b-40dc-a330-d328102cf80c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1505.833910] env[63379]: DEBUG oslo_concurrency.lockutils [None req-093f700d-9e58-47b8-aaf3-6db145a6412b tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1505.993279] env[63379]: DEBUG nova.compute.utils [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1505.998076] env[63379]: DEBUG nova.compute.manager [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1505.998266] env[63379]: DEBUG nova.network.neutron [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1506.030929] env[63379]: DEBUG oslo_vmware.api [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779287, 'name': PowerOnVM_Task, 'duration_secs': 0.542418} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.031240] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1506.032209] env[63379]: INFO nova.compute.manager [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Took 8.81 seconds to spawn the instance on the hypervisor. [ 1506.032209] env[63379]: DEBUG nova.compute.manager [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1506.032428] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b58d8a28-47ff-4be6-98a8-6b9b5eff4883 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.039835] env[63379]: DEBUG nova.compute.manager [req-61df9dc5-ae5c-448f-8f11-86c6f41139ca req-b98716db-9de1-4f46-ae99-1ff2f5f6128d service nova] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Received event network-vif-deleted-5c6da110-b3ed-4065-94b0-004b98fd1363 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1506.054988] env[63379]: DEBUG nova.policy [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4a09fcc05b7d4239bcd13389bb41ebf4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f6552f9956224ba5a0a01328da741242', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1506.094104] env[63379]: DEBUG oslo_vmware.api [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Task: {'id': task-1779291, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.178506] env[63379]: DEBUG oslo_vmware.api [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e8b479-d553-4f0d-d72c-f98ddb8c2c0b, 'name': SearchDatastore_Task, 'duration_secs': 0.025184} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.178506] env[63379]: DEBUG oslo_concurrency.lockutils [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1506.178670] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1506.178978] env[63379]: DEBUG oslo_concurrency.lockutils [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1506.179371] env[63379]: DEBUG oslo_concurrency.lockutils [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1506.179457] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1506.180171] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-93f74074-9ca9-4e8e-bc91-2bdfe23e63b0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.189085] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1506.189085] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1506.192280] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-660bae15-6199-4ad0-93de-6dfb08ce9fdd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.198030] env[63379]: DEBUG oslo_vmware.api [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1506.198030] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]521d1c99-b59b-a7d2-bfbd-f59e71098782" [ 1506.198030] env[63379]: _type = "Task" [ 1506.198030] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1506.205935] env[63379]: DEBUG oslo_vmware.api [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]521d1c99-b59b-a7d2-bfbd-f59e71098782, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.332107] env[63379]: DEBUG oslo_concurrency.lockutils [req-1eb2c3f1-2446-40fa-bc93-d67b4bd6c19f req-07046afe-8793-4cea-affb-64cce315bd41 service nova] Releasing lock "refresh_cache-758ade2c-7f75-4907-95d5-681d5792ae31" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1506.398529] env[63379]: DEBUG nova.network.neutron [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Successfully created port: 269637bb-41c4-433e-aaab-1c67c39977b1 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1506.501435] env[63379]: DEBUG nova.compute.manager [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1506.530972] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1a72e36-9738-47cd-aca6-bb3bd422403c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.538862] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db2bfe5c-28d5-47d8-bc7b-ad56d7279948 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.577707] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6497fb50-58e9-477c-a323-dba2ea4a6609 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.580801] env[63379]: INFO nova.compute.manager [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Took 48.76 seconds to build instance. 
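Aside: the SearchDatastore_Task entries above follow oslo.vmware's wait_for_task/_poll_task pattern, in which the API session repeatedly reads the task's info until it reports success or error. A minimal sketch of that polling loop, assuming a hypothetical fetch_task_info() helper in place of the PropertyCollector read; this is an illustration of the pattern, not the oslo.vmware implementation:

    import time

    def wait_for_task(task_ref, fetch_task_info, poll_interval=0.5):
        """Poll a vSphere task until it reaches a terminal state."""
        while True:
            info = fetch_task_info(task_ref)        # e.g. {'state': 'running', 'progress': 77}
            if info['state'] == 'success':
                return info.get('result')           # logged above as "completed successfully"
            if info['state'] == 'error':
                raise RuntimeError(info.get('error', 'task failed'))
            time.sleep(poll_interval)               # the "progress is NN%" DEBUG lines come from each pass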
[ 1506.590208] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebc0dda2-58f5-4185-8a7a-9fea83a2059f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.599673] env[63379]: DEBUG oslo_vmware.api [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Task: {'id': task-1779291, 'name': ReconfigVM_Task, 'duration_secs': 0.968399} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.607598] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Reconfigured VM instance instance-00000026 to attach disk [datastore1] aedff32b-b0c2-4a93-a2c6-349d26839cc4/aedff32b-b0c2-4a93-a2c6-349d26839cc4.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1506.608690] env[63379]: DEBUG nova.compute.provider_tree [None req-e047eba5-5831-4ef9-975e-1af10c587cf4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1506.609896] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bbcee354-1ab5-47f2-9724-2e6ae79b32bb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.617008] env[63379]: DEBUG oslo_vmware.api [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Waiting for the task: (returnval){ [ 1506.617008] env[63379]: value = "task-1779292" [ 1506.617008] env[63379]: _type = "Task" [ 1506.617008] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1506.630501] env[63379]: DEBUG oslo_vmware.api [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Task: {'id': task-1779292, 'name': Rename_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.708891] env[63379]: DEBUG oslo_vmware.api [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]521d1c99-b59b-a7d2-bfbd-f59e71098782, 'name': SearchDatastore_Task, 'duration_secs': 0.010858} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.709744] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4c6999a-74c5-4534-8a1b-81bc918b416b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.714709] env[63379]: DEBUG oslo_vmware.api [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1506.714709] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5214b8db-0952-fda6-e9aa-07ba8c2f3acb" [ 1506.714709] env[63379]: _type = "Task" [ 1506.714709] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1506.722245] env[63379]: DEBUG oslo_vmware.api [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5214b8db-0952-fda6-e9aa-07ba8c2f3acb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.083368] env[63379]: DEBUG oslo_concurrency.lockutils [None req-73412d49-e6e2-45da-ad6e-b2155e3f27a8 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Lock "aa44a4ff-14e5-42d2-a082-06fe0ae9646c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.852s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1507.115139] env[63379]: DEBUG nova.scheduler.client.report [None req-e047eba5-5831-4ef9-975e-1af10c587cf4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1507.133695] env[63379]: DEBUG oslo_vmware.api [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Task: {'id': task-1779292, 'name': Rename_Task, 'duration_secs': 0.296058} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1507.133695] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1507.133695] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-328fc524-9c08-416f-9a6a-2a9b80e395aa {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.140945] env[63379]: DEBUG oslo_vmware.api [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Waiting for the task: (returnval){ [ 1507.140945] env[63379]: value = "task-1779293" [ 1507.140945] env[63379]: _type = "Task" [ 1507.140945] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1507.153753] env[63379]: DEBUG oslo_vmware.api [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Task: {'id': task-1779293, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.228082] env[63379]: DEBUG oslo_vmware.api [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5214b8db-0952-fda6-e9aa-07ba8c2f3acb, 'name': SearchDatastore_Task, 'duration_secs': 0.010087} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1507.228412] env[63379]: DEBUG oslo_concurrency.lockutils [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1507.228693] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 758ade2c-7f75-4907-95d5-681d5792ae31/758ade2c-7f75-4907-95d5-681d5792ae31.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1507.228970] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9d931d21-9b1c-4a75-b4a3-68b6b6f791c1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.236710] env[63379]: DEBUG oslo_vmware.api [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1507.236710] env[63379]: value = "task-1779294" [ 1507.236710] env[63379]: _type = "Task" [ 1507.236710] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1507.246946] env[63379]: DEBUG oslo_vmware.api [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779294, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.514281] env[63379]: DEBUG nova.compute.manager [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1507.543251] env[63379]: DEBUG nova.virt.hardware [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1507.543251] env[63379]: DEBUG nova.virt.hardware [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1507.543251] env[63379]: DEBUG nova.virt.hardware [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1507.543251] env[63379]: DEBUG nova.virt.hardware [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1507.543691] env[63379]: DEBUG nova.virt.hardware [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1507.544058] env[63379]: DEBUG nova.virt.hardware [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1507.544503] env[63379]: DEBUG nova.virt.hardware [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1507.544817] env[63379]: DEBUG nova.virt.hardware [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1507.545154] env[63379]: DEBUG nova.virt.hardware [None 
req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1507.545459] env[63379]: DEBUG nova.virt.hardware [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1507.545793] env[63379]: DEBUG nova.virt.hardware [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1507.546954] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-825eabd0-f5d4-4335-9749-ac7ef6c616ea {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.557367] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07e02244-e03a-4924-ad13-6bce6a86c5ab {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.586019] env[63379]: DEBUG nova.compute.manager [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] [instance: 158fe346-93f5-422b-877a-8423547da58f] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1507.618551] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e047eba5-5831-4ef9-975e-1af10c587cf4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.129s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1507.620973] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.797s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1507.622670] env[63379]: INFO nova.compute.claims [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1507.654023] env[63379]: DEBUG oslo_vmware.api [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Task: {'id': task-1779293, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.659390] env[63379]: INFO nova.scheduler.client.report [None req-e047eba5-5831-4ef9-975e-1af10c587cf4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Deleted allocations for instance 48c0d20e-adc4-40a9-888c-ffea363f6edb [ 1507.748865] env[63379]: DEBUG oslo_vmware.api [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779294, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.066403] env[63379]: DEBUG nova.compute.manager [req-3cdeedf7-2a6c-4f07-8538-8dbc0691dd16 req-e4e23462-7886-4a61-ae7b-5463ec08abb4 service nova] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Received event network-vif-plugged-269637bb-41c4-433e-aaab-1c67c39977b1 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1508.066635] env[63379]: DEBUG oslo_concurrency.lockutils [req-3cdeedf7-2a6c-4f07-8538-8dbc0691dd16 req-e4e23462-7886-4a61-ae7b-5463ec08abb4 service nova] Acquiring lock "0324da80-b97c-4dc9-9083-199fbda60341-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1508.066855] env[63379]: DEBUG oslo_concurrency.lockutils [req-3cdeedf7-2a6c-4f07-8538-8dbc0691dd16 req-e4e23462-7886-4a61-ae7b-5463ec08abb4 service nova] Lock "0324da80-b97c-4dc9-9083-199fbda60341-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1508.067198] env[63379]: DEBUG oslo_concurrency.lockutils [req-3cdeedf7-2a6c-4f07-8538-8dbc0691dd16 req-e4e23462-7886-4a61-ae7b-5463ec08abb4 service nova] Lock "0324da80-b97c-4dc9-9083-199fbda60341-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1508.067433] env[63379]: DEBUG nova.compute.manager [req-3cdeedf7-2a6c-4f07-8538-8dbc0691dd16 req-e4e23462-7886-4a61-ae7b-5463ec08abb4 service nova] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] No waiting events found dispatching network-vif-plugged-269637bb-41c4-433e-aaab-1c67c39977b1 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1508.067625] env[63379]: WARNING nova.compute.manager [req-3cdeedf7-2a6c-4f07-8538-8dbc0691dd16 req-e4e23462-7886-4a61-ae7b-5463ec08abb4 service nova] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Received unexpected event network-vif-plugged-269637bb-41c4-433e-aaab-1c67c39977b1 for instance with vm_state building and task_state spawning. 
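Aside: the network-vif-plugged handling above shows the external-event dispatch pattern: a waiter registered per instance and event name is popped and signalled when the event arrives, and an event with no registered waiter is only logged as unexpected (the WARNING above, which is normal while the instance is still building). A simplified, hypothetical illustration of that pattern, not Nova's actual code:

    import threading
    from collections import defaultdict

    class InstanceEvents:
        """Per-instance registry of events a spawn is waiting on."""

        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = defaultdict(dict)   # instance_uuid -> {event_name: Event}

        def prepare(self, instance_uuid, event_name):
            ev = threading.Event()
            with self._lock:
                self._waiters[instance_uuid][event_name] = ev
            return ev

        def pop_instance_event(self, instance_uuid, event_name):
            with self._lock:
                return self._waiters[instance_uuid].pop(event_name, None)

    def external_instance_event(events, instance_uuid, event_name):
        waiter = events.pop_instance_event(instance_uuid, event_name)
        if waiter is None:
            # No one is waiting yet -> "Received unexpected event ..." WARNING.
            print("WARNING: unexpected event %s for %s" % (event_name, instance_uuid))
        else:
            waiter.set()                        # unblocks the code waiting on the VIF plug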
[ 1508.111522] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1508.154466] env[63379]: DEBUG oslo_vmware.api [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Task: {'id': task-1779293, 'name': PowerOnVM_Task, 'duration_secs': 0.849747} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1508.154738] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1508.154968] env[63379]: INFO nova.compute.manager [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Took 8.27 seconds to spawn the instance on the hypervisor. [ 1508.155184] env[63379]: DEBUG nova.compute.manager [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1508.156178] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f33304a-1f55-474a-b3ed-cc3825a59ce2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.169982] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e047eba5-5831-4ef9-975e-1af10c587cf4 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "48c0d20e-adc4-40a9-888c-ffea363f6edb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.538s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1508.186835] env[63379]: DEBUG nova.network.neutron [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Successfully updated port: 269637bb-41c4-433e-aaab-1c67c39977b1 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1508.248024] env[63379]: DEBUG oslo_vmware.api [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779294, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.555022} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1508.248024] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 758ade2c-7f75-4907-95d5-681d5792ae31/758ade2c-7f75-4907-95d5-681d5792ae31.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1508.248312] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1508.248312] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8d4f46fc-a009-4390-a3af-2e705696d6e8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.255231] env[63379]: DEBUG oslo_vmware.api [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1508.255231] env[63379]: value = "task-1779295" [ 1508.255231] env[63379]: _type = "Task" [ 1508.255231] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1508.263954] env[63379]: DEBUG oslo_vmware.api [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779295, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.301377] env[63379]: DEBUG nova.compute.manager [None req-f163ccaa-7de0-4be8-a358-a6c9a5f91e08 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1508.302084] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7451bcb-f922-497a-9505-d322a6183494 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.679783] env[63379]: INFO nova.compute.manager [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Took 42.89 seconds to build instance. 
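Aside: task-1779294 through task-1779296 above trace the root-disk preparation flow: the cached image VMDK is copied next to the instance while a lock is held on the cache entry, extended to the flavor's root size (1048576 KB for the 1 GB m1.nano root), then attached by reconfiguring the VM. A hedged sketch of that sequence; lockutils.lock is the real oslo.concurrency context manager, while copy_virtual_disk, extend_virtual_disk and attach_disk_to_vm are hypothetical stand-ins for the CopyVirtualDisk_Task, ExtendVirtualDisk_Task and ReconfigVM_Task calls:

    from oslo_concurrency import lockutils

    def prepare_root_disk(session, vm_ref, cache_vmdk, instance_vmdk, root_gb,
                          copy_virtual_disk, extend_virtual_disk, attach_disk_to_vm):
        """Copy the cached image disk, grow it to root_gb, attach it to the VM."""
        # Serialize access to the image-cache entry, as the
        # "Acquiring/Releasing lock [datastore1] devstack-image-cache_base/..." lines do.
        with lockutils.lock(cache_vmdk):
            copy_virtual_disk(session, cache_vmdk, instance_vmdk)
        # Root disk size is logged in KB: 1 GB -> 1048576.
        extend_virtual_disk(session, instance_vmdk, root_gb * 1024 * 1024)
        attach_disk_to_vm(session, vm_ref, instance_vmdk, disk_type="sparse")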
[ 1508.696182] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquiring lock "refresh_cache-0324da80-b97c-4dc9-9083-199fbda60341" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1508.696182] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquired lock "refresh_cache-0324da80-b97c-4dc9-9083-199fbda60341" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1508.696182] env[63379]: DEBUG nova.network.neutron [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1508.767798] env[63379]: DEBUG oslo_vmware.api [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779295, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085312} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1508.768098] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1508.768971] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baf89d09-a4fb-47f6-974b-88510086d48a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.793049] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Reconfiguring VM instance instance-00000027 to attach disk [datastore1] 758ade2c-7f75-4907-95d5-681d5792ae31/758ade2c-7f75-4907-95d5-681d5792ae31.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1508.796131] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f7ce1673-b2e0-4cc3-a0c0-fec88fbbc8b6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.815876] env[63379]: INFO nova.compute.manager [None req-f163ccaa-7de0-4be8-a358-a6c9a5f91e08 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] instance snapshotting [ 1508.820124] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49c83da1-fe9d-4640-acb5-b1673191b0fb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.823885] env[63379]: DEBUG oslo_vmware.api [None 
req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1508.823885] env[63379]: value = "task-1779296" [ 1508.823885] env[63379]: _type = "Task" [ 1508.823885] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1508.846221] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b168ed8f-3def-4f21-9b30-e35c7105fc4c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.853472] env[63379]: DEBUG oslo_vmware.api [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779296, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.185455] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6b070053-23d1-4429-8e04-d9be4876620d tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Lock "aedff32b-b0c2-4a93-a2c6-349d26839cc4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.190s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1509.228599] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96c79bf5-4dc3-4d2c-92dd-e667a3af4cbc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.236839] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-757c5628-a2bb-44da-ab7b-ac3f02f51061 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.272691] env[63379]: DEBUG nova.network.neutron [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Instance cache missing network info. 
{{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1509.275627] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71c5525d-b534-42c3-babc-8f2563cd0b31 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.285990] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-612c0a56-0afa-4a5f-b672-23db51e001ea {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.302156] env[63379]: DEBUG nova.compute.provider_tree [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1509.336500] env[63379]: DEBUG oslo_vmware.api [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779296, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.367383] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f163ccaa-7de0-4be8-a358-a6c9a5f91e08 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Creating Snapshot of the VM instance {{(pid=63379) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1509.367716] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-ac866c66-6df0-4c18-9e2b-507a8874166c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.376183] env[63379]: DEBUG oslo_vmware.api [None req-f163ccaa-7de0-4be8-a358-a6c9a5f91e08 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Waiting for the task: (returnval){ [ 1509.376183] env[63379]: value = "task-1779297" [ 1509.376183] env[63379]: _type = "Task" [ 1509.376183] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1509.386702] env[63379]: DEBUG oslo_vmware.api [None req-f163ccaa-7de0-4be8-a358-a6c9a5f91e08 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779297, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.557533] env[63379]: DEBUG nova.network.neutron [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Updating instance_info_cache with network_info: [{"id": "269637bb-41c4-433e-aaab-1c67c39977b1", "address": "fa:16:3e:ec:3a:c4", "network": {"id": "37610d35-b1d3-4657-9542-9e3e955af5be", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-58653119-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6552f9956224ba5a0a01328da741242", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap269637bb-41", "ovs_interfaceid": "269637bb-41c4-433e-aaab-1c67c39977b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1509.688964] env[63379]: DEBUG nova.compute.manager [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1509.807820] env[63379]: DEBUG nova.scheduler.client.report [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1509.838776] env[63379]: DEBUG oslo_vmware.api [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779296, 'name': ReconfigVM_Task, 'duration_secs': 0.642955} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1509.840034] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Reconfigured VM instance instance-00000027 to attach disk [datastore1] 758ade2c-7f75-4907-95d5-681d5792ae31/758ade2c-7f75-4907-95d5-681d5792ae31.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1509.841156] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3d53d4a9-08d1-4415-99fb-1accce176888 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.849324] env[63379]: DEBUG oslo_vmware.api [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1509.849324] env[63379]: value = "task-1779298" [ 1509.849324] env[63379]: _type = "Task" [ 1509.849324] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1509.860654] env[63379]: DEBUG oslo_vmware.api [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779298, 'name': Rename_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.887383] env[63379]: DEBUG oslo_vmware.api [None req-f163ccaa-7de0-4be8-a358-a6c9a5f91e08 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779297, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.061209] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Releasing lock "refresh_cache-0324da80-b97c-4dc9-9083-199fbda60341" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1510.061668] env[63379]: DEBUG nova.compute.manager [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Instance network_info: |[{"id": "269637bb-41c4-433e-aaab-1c67c39977b1", "address": "fa:16:3e:ec:3a:c4", "network": {"id": "37610d35-b1d3-4657-9542-9e3e955af5be", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-58653119-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6552f9956224ba5a0a01328da741242", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap269637bb-41", "ovs_interfaceid": "269637bb-41c4-433e-aaab-1c67c39977b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1510.062745] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ec:3a:c4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e445fb59-822c-4d7d-943b-c8e3bbaca62e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '269637bb-41c4-433e-aaab-1c67c39977b1', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1510.076379] env[63379]: DEBUG oslo.service.loopingcall [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1510.076379] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1510.076618] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-92f28160-fb63-474a-99d9-6eb7ec9c5727 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.099207] env[63379]: DEBUG nova.compute.manager [req-9f87d007-033d-4f17-812a-50988d866ccf req-b4c95c53-0ad6-401e-a6d5-a5c3e0164eed service nova] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Received event network-changed-269637bb-41c4-433e-aaab-1c67c39977b1 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1510.099455] env[63379]: DEBUG nova.compute.manager [req-9f87d007-033d-4f17-812a-50988d866ccf req-b4c95c53-0ad6-401e-a6d5-a5c3e0164eed service nova] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Refreshing instance network info cache due to event network-changed-269637bb-41c4-433e-aaab-1c67c39977b1. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1510.099702] env[63379]: DEBUG oslo_concurrency.lockutils [req-9f87d007-033d-4f17-812a-50988d866ccf req-b4c95c53-0ad6-401e-a6d5-a5c3e0164eed service nova] Acquiring lock "refresh_cache-0324da80-b97c-4dc9-9083-199fbda60341" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1510.099833] env[63379]: DEBUG oslo_concurrency.lockutils [req-9f87d007-033d-4f17-812a-50988d866ccf req-b4c95c53-0ad6-401e-a6d5-a5c3e0164eed service nova] Acquired lock "refresh_cache-0324da80-b97c-4dc9-9083-199fbda60341" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1510.099997] env[63379]: DEBUG nova.network.neutron [req-9f87d007-033d-4f17-812a-50988d866ccf req-b4c95c53-0ad6-401e-a6d5-a5c3e0164eed service nova] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Refreshing network info cache for port 269637bb-41c4-433e-aaab-1c67c39977b1 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1510.107443] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1510.107443] env[63379]: value = "task-1779299" [ 1510.107443] env[63379]: _type = "Task" [ 1510.107443] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.117263] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779299, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.221130] env[63379]: DEBUG oslo_concurrency.lockutils [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1510.316022] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.692s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1510.316022] env[63379]: DEBUG nova.compute.manager [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1510.317786] env[63379]: DEBUG oslo_concurrency.lockutils [None req-03cd40a0-90f0-493c-9e31-d8dd898eb7fe tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.378s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1510.319684] env[63379]: DEBUG nova.objects.instance [None req-03cd40a0-90f0-493c-9e31-d8dd898eb7fe tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Lazy-loading 'resources' on Instance uuid 55fb6899-0321-4bf2-bf3f-2e87dd479433 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1510.361232] env[63379]: DEBUG oslo_vmware.api [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779298, 'name': Rename_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.387757] env[63379]: DEBUG oslo_vmware.api [None req-f163ccaa-7de0-4be8-a358-a6c9a5f91e08 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779297, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.618756] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779299, 'name': CreateVM_Task, 'duration_secs': 0.399151} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1510.618756] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1510.618756] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1510.618756] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1510.619021] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1510.620030] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e9935b7-6d4d-4c54-bcb4-f2d78c67d9e2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.624498] env[63379]: DEBUG oslo_vmware.api [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1510.624498] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52ac8d53-680d-c65e-9769-facb357d10ce" [ 1510.624498] env[63379]: _type = "Task" [ 1510.624498] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.632364] env[63379]: DEBUG oslo_vmware.api [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52ac8d53-680d-c65e-9769-facb357d10ce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.819039] env[63379]: DEBUG nova.compute.utils [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1510.820420] env[63379]: DEBUG nova.compute.manager [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1510.820594] env[63379]: DEBUG nova.network.neutron [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1510.861586] env[63379]: DEBUG nova.network.neutron [req-9f87d007-033d-4f17-812a-50988d866ccf req-b4c95c53-0ad6-401e-a6d5-a5c3e0164eed service nova] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Updated VIF entry in instance network info cache for port 269637bb-41c4-433e-aaab-1c67c39977b1. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1510.861586] env[63379]: DEBUG nova.network.neutron [req-9f87d007-033d-4f17-812a-50988d866ccf req-b4c95c53-0ad6-401e-a6d5-a5c3e0164eed service nova] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Updating instance_info_cache with network_info: [{"id": "269637bb-41c4-433e-aaab-1c67c39977b1", "address": "fa:16:3e:ec:3a:c4", "network": {"id": "37610d35-b1d3-4657-9542-9e3e955af5be", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-58653119-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6552f9956224ba5a0a01328da741242", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap269637bb-41", "ovs_interfaceid": "269637bb-41c4-433e-aaab-1c67c39977b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1510.867219] env[63379]: DEBUG oslo_vmware.api [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779298, 'name': Rename_Task} progress is 99%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.877075] env[63379]: DEBUG nova.policy [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'db6bd541e63b47e29e5c02fc02f162c8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dce3d2c2429642ee92f4bb7e53b0a128', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1510.889311] env[63379]: DEBUG oslo_vmware.api [None req-f163ccaa-7de0-4be8-a358-a6c9a5f91e08 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779297, 'name': CreateSnapshot_Task, 'duration_secs': 1.040176} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1510.889516] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f163ccaa-7de0-4be8-a358-a6c9a5f91e08 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Created Snapshot of the VM instance {{(pid=63379) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1510.890453] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72adc667-3330-4fa7-aa02-544860232b94 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.130583] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "f983d089-7cfc-46a5-8f8d-f49f67aef1da" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1511.131848] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "f983d089-7cfc-46a5-8f8d-f49f67aef1da" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1511.142388] env[63379]: DEBUG oslo_vmware.api [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52ac8d53-680d-c65e-9769-facb357d10ce, 'name': SearchDatastore_Task, 'duration_secs': 0.012561} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.142753] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1511.143210] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1511.143447] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1511.143710] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1511.143922] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1511.144210] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-21f52421-aa69-46b0-ad20-7f21d462608e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.164808] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1511.165078] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1511.165851] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e579169-31ba-413a-a2cf-27676a5885b4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.175106] env[63379]: DEBUG oslo_vmware.api [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1511.175106] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5246abca-cf40-c358-2955-3bd0ef8e27f9" [ 1511.175106] env[63379]: _type = "Task" [ 1511.175106] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1511.182576] env[63379]: DEBUG oslo_vmware.api [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5246abca-cf40-c358-2955-3bd0ef8e27f9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.217959] env[63379]: DEBUG nova.network.neutron [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Successfully created port: 9f9986ae-7761-479b-b7eb-9d68c7c70e11 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1511.309882] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-039a3495-b45a-4c9d-b6b9-3b9fba01852a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.317702] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-680ca763-fb2d-4f90-b09b-0481de6ca9c4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.323229] env[63379]: DEBUG nova.compute.manager [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Start building block device mappings for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1511.350103] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cb61a34-0d49-472a-b905-0f5a00cfb96b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.360891] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7123055c-88e5-4246-aaaf-d9f6761f4c90 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.365306] env[63379]: DEBUG oslo_concurrency.lockutils [req-9f87d007-033d-4f17-812a-50988d866ccf req-b4c95c53-0ad6-401e-a6d5-a5c3e0164eed service nova] Releasing lock "refresh_cache-0324da80-b97c-4dc9-9083-199fbda60341" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1511.377537] env[63379]: DEBUG nova.compute.provider_tree [None req-03cd40a0-90f0-493c-9e31-d8dd898eb7fe tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1511.381067] env[63379]: DEBUG oslo_vmware.api [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779298, 'name': Rename_Task, 'duration_secs': 1.179835} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.381719] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1511.381964] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-09555fc9-bb77-4c42-839c-e28ecdd955ff {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.389364] env[63379]: DEBUG oslo_vmware.api [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1511.389364] env[63379]: value = "task-1779300" [ 1511.389364] env[63379]: _type = "Task" [ 1511.389364] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1511.396858] env[63379]: DEBUG oslo_vmware.api [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779300, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.410137] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f163ccaa-7de0-4be8-a358-a6c9a5f91e08 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Creating linked-clone VM from snapshot {{(pid=63379) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1511.410413] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-ac8b6c92-802b-4f0c-99ba-c0fc2c075cd4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.417872] env[63379]: DEBUG oslo_vmware.api [None req-f163ccaa-7de0-4be8-a358-a6c9a5f91e08 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Waiting for the task: (returnval){ [ 1511.417872] env[63379]: value = "task-1779301" [ 1511.417872] env[63379]: _type = "Task" [ 1511.417872] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1511.427379] env[63379]: DEBUG oslo_vmware.api [None req-f163ccaa-7de0-4be8-a358-a6c9a5f91e08 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779301, 'name': CloneVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.685753] env[63379]: DEBUG oslo_vmware.api [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5246abca-cf40-c358-2955-3bd0ef8e27f9, 'name': SearchDatastore_Task, 'duration_secs': 0.028846} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.686595] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1e1c040-7c9b-4075-b9d3-32ef1ca1782e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.691639] env[63379]: DEBUG oslo_vmware.api [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1511.691639] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]520adfed-4d7d-c6aa-4312-e390af358e42" [ 1511.691639] env[63379]: _type = "Task" [ 1511.691639] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1511.699092] env[63379]: DEBUG oslo_vmware.api [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]520adfed-4d7d-c6aa-4312-e390af358e42, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.882525] env[63379]: DEBUG nova.scheduler.client.report [None req-03cd40a0-90f0-493c-9e31-d8dd898eb7fe tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1511.900767] env[63379]: DEBUG oslo_vmware.api [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779300, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.927681] env[63379]: DEBUG oslo_vmware.api [None req-f163ccaa-7de0-4be8-a358-a6c9a5f91e08 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779301, 'name': CloneVM_Task} progress is 94%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.121023] env[63379]: DEBUG nova.compute.manager [req-93640eaf-eb9d-4641-b20b-bb1f9935d3fd req-1ff8dee6-b594-4c34-ac40-edd830d8914c service nova] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Received event network-changed-d994b910-f078-4d71-a9e5-f3177a54dfef {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1512.121279] env[63379]: DEBUG nova.compute.manager [req-93640eaf-eb9d-4641-b20b-bb1f9935d3fd req-1ff8dee6-b594-4c34-ac40-edd830d8914c service nova] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Refreshing instance network info cache due to event network-changed-d994b910-f078-4d71-a9e5-f3177a54dfef. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1512.121595] env[63379]: DEBUG oslo_concurrency.lockutils [req-93640eaf-eb9d-4641-b20b-bb1f9935d3fd req-1ff8dee6-b594-4c34-ac40-edd830d8914c service nova] Acquiring lock "refresh_cache-aedff32b-b0c2-4a93-a2c6-349d26839cc4" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1512.121807] env[63379]: DEBUG oslo_concurrency.lockutils [req-93640eaf-eb9d-4641-b20b-bb1f9935d3fd req-1ff8dee6-b594-4c34-ac40-edd830d8914c service nova] Acquired lock "refresh_cache-aedff32b-b0c2-4a93-a2c6-349d26839cc4" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1512.122029] env[63379]: DEBUG nova.network.neutron [req-93640eaf-eb9d-4641-b20b-bb1f9935d3fd req-1ff8dee6-b594-4c34-ac40-edd830d8914c service nova] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Refreshing network info cache for port d994b910-f078-4d71-a9e5-f3177a54dfef {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1512.203312] env[63379]: DEBUG oslo_vmware.api [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]520adfed-4d7d-c6aa-4312-e390af358e42, 'name': SearchDatastore_Task, 'duration_secs': 0.0112} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1512.203607] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1512.203911] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 0324da80-b97c-4dc9-9083-199fbda60341/0324da80-b97c-4dc9-9083-199fbda60341.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1512.204231] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8f12c153-238f-48f2-93c6-836c2cbd78b6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.212352] env[63379]: DEBUG oslo_vmware.api [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1512.212352] env[63379]: value = "task-1779302" [ 1512.212352] env[63379]: _type = "Task" [ 1512.212352] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1512.221576] env[63379]: DEBUG oslo_vmware.api [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779302, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.356378] env[63379]: DEBUG nova.compute.manager [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1512.383788] env[63379]: DEBUG nova.virt.hardware [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1512.384061] env[63379]: DEBUG nova.virt.hardware [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1512.384235] env[63379]: DEBUG nova.virt.hardware [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1512.384426] env[63379]: DEBUG nova.virt.hardware [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1512.384578] env[63379]: DEBUG nova.virt.hardware [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1512.384729] env[63379]: DEBUG nova.virt.hardware [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1512.384966] env[63379]: DEBUG nova.virt.hardware [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1512.385163] env[63379]: DEBUG nova.virt.hardware [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1512.385337] env[63379]: DEBUG nova.virt.hardware [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1512.385505] env[63379]: DEBUG nova.virt.hardware [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1512.385683] env[63379]: DEBUG nova.virt.hardware [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1512.387121] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd8fa8c4-51a6-463f-b433-488203618283 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.390143] env[63379]: DEBUG oslo_concurrency.lockutils [None req-03cd40a0-90f0-493c-9e31-d8dd898eb7fe tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.072s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1512.392649] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.771s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1512.394119] env[63379]: INFO nova.compute.claims [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1512.405030] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47e8f79f-6379-4f6e-86d1-bd00b0b800a0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.413652] env[63379]: DEBUG oslo_vmware.api [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779300, 'name': PowerOnVM_Task, 'duration_secs': 0.642295} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1512.414641] env[63379]: INFO nova.scheduler.client.report [None req-03cd40a0-90f0-493c-9e31-d8dd898eb7fe tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Deleted allocations for instance 55fb6899-0321-4bf2-bf3f-2e87dd479433 [ 1512.416726] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1512.416959] env[63379]: INFO nova.compute.manager [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Took 9.80 seconds to spawn the instance on the hypervisor. [ 1512.417185] env[63379]: DEBUG nova.compute.manager [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1512.420317] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bbd263f-3854-4f03-9529-56b8875546d5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.440238] env[63379]: DEBUG oslo_vmware.api [None req-f163ccaa-7de0-4be8-a358-a6c9a5f91e08 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779301, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.678864] env[63379]: DEBUG nova.compute.manager [req-f4289488-04b4-49d2-b5f7-211d1c67aa1f req-fc88b680-1f7f-4ecf-be77-936244a1b258 service nova] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Received event network-vif-plugged-9f9986ae-7761-479b-b7eb-9d68c7c70e11 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1512.679472] env[63379]: DEBUG oslo_concurrency.lockutils [req-f4289488-04b4-49d2-b5f7-211d1c67aa1f req-fc88b680-1f7f-4ecf-be77-936244a1b258 service nova] Acquiring lock "2f98800d-800f-4ad7-bd65-f12879f02ce5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1512.679865] env[63379]: DEBUG oslo_concurrency.lockutils [req-f4289488-04b4-49d2-b5f7-211d1c67aa1f req-fc88b680-1f7f-4ecf-be77-936244a1b258 service nova] Lock "2f98800d-800f-4ad7-bd65-f12879f02ce5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1512.680179] env[63379]: DEBUG oslo_concurrency.lockutils [req-f4289488-04b4-49d2-b5f7-211d1c67aa1f req-fc88b680-1f7f-4ecf-be77-936244a1b258 service nova] Lock "2f98800d-800f-4ad7-bd65-f12879f02ce5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1512.680546] env[63379]: DEBUG nova.compute.manager [req-f4289488-04b4-49d2-b5f7-211d1c67aa1f req-fc88b680-1f7f-4ecf-be77-936244a1b258 service nova] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] No waiting events found dispatching network-vif-plugged-9f9986ae-7761-479b-b7eb-9d68c7c70e11 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1512.680817] env[63379]: WARNING nova.compute.manager [req-f4289488-04b4-49d2-b5f7-211d1c67aa1f req-fc88b680-1f7f-4ecf-be77-936244a1b258 service nova] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Received unexpected event network-vif-plugged-9f9986ae-7761-479b-b7eb-9d68c7c70e11 for instance with vm_state building and task_state spawning. [ 1512.736651] env[63379]: DEBUG oslo_vmware.api [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779302, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.834618] env[63379]: DEBUG nova.network.neutron [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Successfully updated port: 9f9986ae-7761-479b-b7eb-9d68c7c70e11 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1512.940114] env[63379]: DEBUG oslo_vmware.api [None req-f163ccaa-7de0-4be8-a358-a6c9a5f91e08 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779301, 'name': CloneVM_Task} progress is 95%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.941850] env[63379]: DEBUG oslo_concurrency.lockutils [None req-03cd40a0-90f0-493c-9e31-d8dd898eb7fe tempest-ServersAdminNegativeTestJSON-132676374 tempest-ServersAdminNegativeTestJSON-132676374-project-member] Lock "55fb6899-0321-4bf2-bf3f-2e87dd479433" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.095s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1512.952057] env[63379]: DEBUG nova.network.neutron [req-93640eaf-eb9d-4641-b20b-bb1f9935d3fd req-1ff8dee6-b594-4c34-ac40-edd830d8914c service nova] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Updated VIF entry in instance network info cache for port d994b910-f078-4d71-a9e5-f3177a54dfef. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1512.952433] env[63379]: DEBUG nova.network.neutron [req-93640eaf-eb9d-4641-b20b-bb1f9935d3fd req-1ff8dee6-b594-4c34-ac40-edd830d8914c service nova] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Updating instance_info_cache with network_info: [{"id": "d994b910-f078-4d71-a9e5-f3177a54dfef", "address": "fa:16:3e:45:dc:63", "network": {"id": "6f6e9d87-2ff1-4f28-8e25-27bb57119d5d", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-455291752-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.171", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7fadf3c8628840efb6c8f6f99df21694", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c68b7663-4f0e-47f0-ac7f-40c6d952f7bb", "external-id": "nsx-vlan-transportzone-696", "segmentation_id": 696, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd994b910-f0", "ovs_interfaceid": "d994b910-f078-4d71-a9e5-f3177a54dfef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1512.958034] env[63379]: INFO nova.compute.manager [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Took 43.10 seconds to build instance. [ 1513.224107] env[63379]: DEBUG oslo_vmware.api [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779302, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.712416} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1513.224406] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 0324da80-b97c-4dc9-9083-199fbda60341/0324da80-b97c-4dc9-9083-199fbda60341.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1513.224621] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1513.224893] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8bc9b3dd-ac46-43f4-a7c5-98b8345b07f0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.231678] env[63379]: DEBUG oslo_vmware.api [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1513.231678] env[63379]: value = "task-1779303" [ 1513.231678] env[63379]: _type = "Task" [ 1513.231678] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1513.241528] env[63379]: DEBUG oslo_vmware.api [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779303, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.341695] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquiring lock "refresh_cache-2f98800d-800f-4ad7-bd65-f12879f02ce5" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1513.342133] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquired lock "refresh_cache-2f98800d-800f-4ad7-bd65-f12879f02ce5" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1513.342133] env[63379]: DEBUG nova.network.neutron [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1514.191011] env[63379]: DEBUG oslo_concurrency.lockutils [req-93640eaf-eb9d-4641-b20b-bb1f9935d3fd req-1ff8dee6-b594-4c34-ac40-edd830d8914c service nova] Releasing lock "refresh_cache-aedff32b-b0c2-4a93-a2c6-349d26839cc4" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1514.191548] env[63379]: DEBUG oslo_concurrency.lockutils [None req-896cf164-7221-4bcf-ae67-456657f79fe9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Lock "758ade2c-7f75-4907-95d5-681d5792ae31" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.038s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1514.192305] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9aba89fe-325c-4517-96cf-d2c002d94892 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Acquiring lock "bf0dd3cf-684c-4378-a89c-5b9f16df062d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1514.192409] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9aba89fe-325c-4517-96cf-d2c002d94892 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Lock "bf0dd3cf-684c-4378-a89c-5b9f16df062d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1514.192549] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9aba89fe-325c-4517-96cf-d2c002d94892 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Acquiring lock "bf0dd3cf-684c-4378-a89c-5b9f16df062d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1514.192763] env[63379]: DEBUG oslo_concurrency.lockutils [None 
req-9aba89fe-325c-4517-96cf-d2c002d94892 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Lock "bf0dd3cf-684c-4378-a89c-5b9f16df062d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1514.192906] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9aba89fe-325c-4517-96cf-d2c002d94892 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Lock "bf0dd3cf-684c-4378-a89c-5b9f16df062d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1514.196407] env[63379]: DEBUG oslo_vmware.api [None req-f163ccaa-7de0-4be8-a358-a6c9a5f91e08 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779301, 'name': CloneVM_Task} progress is 95%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.196596] env[63379]: WARNING oslo_vmware.common.loopingcall [None req-f163ccaa-7de0-4be8-a358-a6c9a5f91e08 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] task run outlasted interval by 0.272062 sec [ 1514.197070] env[63379]: INFO nova.compute.manager [None req-9aba89fe-325c-4517-96cf-d2c002d94892 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Terminating instance [ 1514.204812] env[63379]: DEBUG nova.compute.manager [None req-9aba89fe-325c-4517-96cf-d2c002d94892 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1514.205067] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9aba89fe-325c-4517-96cf-d2c002d94892 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1514.210292] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10103ac1-b7e7-465e-b773-9a766cbfabc9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.217866] env[63379]: DEBUG oslo_concurrency.lockutils [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Acquiring lock "38be0e8d-188b-4a98-aedc-5d941b63c000" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1514.217866] env[63379]: DEBUG oslo_concurrency.lockutils [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Lock "38be0e8d-188b-4a98-aedc-5d941b63c000" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1514.229423] env[63379]: DEBUG oslo_vmware.api [None req-f163ccaa-7de0-4be8-a358-a6c9a5f91e08 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779301, 'name': CloneVM_Task} progress is 100%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.229962] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-9aba89fe-325c-4517-96cf-d2c002d94892 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1514.230286] env[63379]: DEBUG oslo_vmware.api [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779303, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074524} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.230556] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-32ba4fef-8f0a-4fac-bdd3-5b81c3439882 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.232247] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1514.233074] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53203b5d-51c8-4986-b589-81c82b8b11c1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.251254] env[63379]: DEBUG oslo_vmware.api [None req-9aba89fe-325c-4517-96cf-d2c002d94892 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Waiting for the task: (returnval){ [ 1514.251254] env[63379]: value = "task-1779304" [ 1514.251254] env[63379]: _type = "Task" [ 1514.251254] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.260248] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Reconfiguring VM instance instance-00000028 to attach disk [datastore1] 0324da80-b97c-4dc9-9083-199fbda60341/0324da80-b97c-4dc9-9083-199fbda60341.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1514.263782] env[63379]: DEBUG nova.network.neutron [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1514.266151] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3cfc7523-cf2d-4923-b283-6bd2e9fe630f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.290200] env[63379]: DEBUG oslo_vmware.api [None req-9aba89fe-325c-4517-96cf-d2c002d94892 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': task-1779304, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.291609] env[63379]: DEBUG oslo_vmware.api [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1514.291609] env[63379]: value = "task-1779305" [ 1514.291609] env[63379]: _type = "Task" [ 1514.291609] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.303052] env[63379]: DEBUG oslo_vmware.api [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779305, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.493595] env[63379]: DEBUG nova.network.neutron [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Updating instance_info_cache with network_info: [{"id": "9f9986ae-7761-479b-b7eb-9d68c7c70e11", "address": "fa:16:3e:2f:35:a5", "network": {"id": "4d473417-d5ca-4b8c-bb54-df6a37cef0bf", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-2006058150-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dce3d2c2429642ee92f4bb7e53b0a128", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61a172ee-af3f-473e-b12a-3fee5bf39c8d", "external-id": "nsx-vlan-transportzone-997", "segmentation_id": 997, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f9986ae-77", "ovs_interfaceid": "9f9986ae-7761-479b-b7eb-9d68c7c70e11", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1514.689170] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d3af0ec-672e-4c73-ae6e-93e57cd14706 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.697359] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41dc2fc2-91e3-4fc4-ab18-90126a4eb4a0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.704216] env[63379]: DEBUG nova.compute.manager [req-b3326c16-c911-44cb-a407-1727077e5a09 req-69149416-70a2-4f2e-8f8d-555490ddf446 service nova] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Received event network-changed-9f9986ae-7761-479b-b7eb-9d68c7c70e11 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1514.704516] env[63379]: DEBUG nova.compute.manager [req-b3326c16-c911-44cb-a407-1727077e5a09 req-69149416-70a2-4f2e-8f8d-555490ddf446 service nova] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Refreshing instance network info cache due to event network-changed-9f9986ae-7761-479b-b7eb-9d68c7c70e11. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1514.704629] env[63379]: DEBUG oslo_concurrency.lockutils [req-b3326c16-c911-44cb-a407-1727077e5a09 req-69149416-70a2-4f2e-8f8d-555490ddf446 service nova] Acquiring lock "refresh_cache-2f98800d-800f-4ad7-bd65-f12879f02ce5" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1514.730554] env[63379]: DEBUG nova.compute.manager [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1514.736987] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37857810-47f3-42f9-9fbc-5939749c6ef3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.748530] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d55b1801-ad1f-4020-b5da-640aecc09e2c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.752440] env[63379]: DEBUG oslo_vmware.api [None req-f163ccaa-7de0-4be8-a358-a6c9a5f91e08 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779301, 'name': CloneVM_Task, 'duration_secs': 2.820853} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.753067] env[63379]: INFO nova.virt.vmwareapi.vmops [None req-f163ccaa-7de0-4be8-a358-a6c9a5f91e08 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Created linked-clone VM from snapshot [ 1514.754210] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9ccc2be-01b6-463e-ac12-34f8600f9dc4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.765226] env[63379]: DEBUG nova.compute.provider_tree [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1514.774387] env[63379]: DEBUG nova.virt.vmwareapi.images [None req-f163ccaa-7de0-4be8-a358-a6c9a5f91e08 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Uploading image 23b4b1bf-c822-4cda-95d9-40f3297d68b6 {{(pid=63379) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1514.783977] env[63379]: DEBUG oslo_vmware.api [None req-9aba89fe-325c-4517-96cf-d2c002d94892 
tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': task-1779304, 'name': PowerOffVM_Task, 'duration_secs': 0.297655} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.784168] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-9aba89fe-325c-4517-96cf-d2c002d94892 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1514.784338] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9aba89fe-325c-4517-96cf-d2c002d94892 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1514.784628] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-282018ac-1c3e-47ec-9141-5526c16906c9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.804820] env[63379]: DEBUG oslo_vmware.api [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779305, 'name': ReconfigVM_Task, 'duration_secs': 0.339311} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.806925] env[63379]: DEBUG oslo_vmware.rw_handles [None req-f163ccaa-7de0-4be8-a358-a6c9a5f91e08 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1514.806925] env[63379]: value = "vm-369336" [ 1514.806925] env[63379]: _type = "VirtualMachine" [ 1514.806925] env[63379]: }. 
{{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1514.807441] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Reconfigured VM instance instance-00000028 to attach disk [datastore1] 0324da80-b97c-4dc9-9083-199fbda60341/0324da80-b97c-4dc9-9083-199fbda60341.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1514.808995] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-e9c78d89-b532-4045-abc8-69c10d21d828 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.811303] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e51f8099-4663-45d3-9b66-be9af3a92379 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.819684] env[63379]: DEBUG oslo_vmware.rw_handles [None req-f163ccaa-7de0-4be8-a358-a6c9a5f91e08 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Lease: (returnval){ [ 1514.819684] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52351203-4bd2-eca3-4515-eb81f50a0731" [ 1514.819684] env[63379]: _type = "HttpNfcLease" [ 1514.819684] env[63379]: } obtained for exporting VM: (result){ [ 1514.819684] env[63379]: value = "vm-369336" [ 1514.819684] env[63379]: _type = "VirtualMachine" [ 1514.819684] env[63379]: }. {{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1514.820163] env[63379]: DEBUG oslo_vmware.api [None req-f163ccaa-7de0-4be8-a358-a6c9a5f91e08 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Waiting for the lease: (returnval){ [ 1514.820163] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52351203-4bd2-eca3-4515-eb81f50a0731" [ 1514.820163] env[63379]: _type = "HttpNfcLease" [ 1514.820163] env[63379]: } to be ready. {{(pid=63379) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1514.821457] env[63379]: DEBUG oslo_vmware.api [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1514.821457] env[63379]: value = "task-1779307" [ 1514.821457] env[63379]: _type = "Task" [ 1514.821457] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.834162] env[63379]: DEBUG oslo_vmware.api [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779307, 'name': Rename_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.835928] env[63379]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1514.835928] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52351203-4bd2-eca3-4515-eb81f50a0731" [ 1514.835928] env[63379]: _type = "HttpNfcLease" [ 1514.835928] env[63379]: } is initializing. 
{{(pid=63379) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1514.867502] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9aba89fe-325c-4517-96cf-d2c002d94892 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1514.867682] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9aba89fe-325c-4517-96cf-d2c002d94892 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1514.868364] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-9aba89fe-325c-4517-96cf-d2c002d94892 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Deleting the datastore file [datastore1] bf0dd3cf-684c-4378-a89c-5b9f16df062d {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1514.868364] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2e52bd99-4f80-4bb5-8d37-fad41eb903cd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.876741] env[63379]: DEBUG oslo_vmware.api [None req-9aba89fe-325c-4517-96cf-d2c002d94892 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Waiting for the task: (returnval){ [ 1514.876741] env[63379]: value = "task-1779309" [ 1514.876741] env[63379]: _type = "Task" [ 1514.876741] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.886457] env[63379]: DEBUG oslo_vmware.api [None req-9aba89fe-325c-4517-96cf-d2c002d94892 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': task-1779309, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.998952] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Releasing lock "refresh_cache-2f98800d-800f-4ad7-bd65-f12879f02ce5" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1514.999350] env[63379]: DEBUG nova.compute.manager [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Instance network_info: |[{"id": "9f9986ae-7761-479b-b7eb-9d68c7c70e11", "address": "fa:16:3e:2f:35:a5", "network": {"id": "4d473417-d5ca-4b8c-bb54-df6a37cef0bf", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-2006058150-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dce3d2c2429642ee92f4bb7e53b0a128", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61a172ee-af3f-473e-b12a-3fee5bf39c8d", "external-id": "nsx-vlan-transportzone-997", "segmentation_id": 997, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f9986ae-77", "ovs_interfaceid": "9f9986ae-7761-479b-b7eb-9d68c7c70e11", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1514.999662] env[63379]: DEBUG oslo_concurrency.lockutils [req-b3326c16-c911-44cb-a407-1727077e5a09 req-69149416-70a2-4f2e-8f8d-555490ddf446 service nova] Acquired lock "refresh_cache-2f98800d-800f-4ad7-bd65-f12879f02ce5" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1514.999851] env[63379]: DEBUG nova.network.neutron [req-b3326c16-c911-44cb-a407-1727077e5a09 req-69149416-70a2-4f2e-8f8d-555490ddf446 service nova] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Refreshing network info cache for port 9f9986ae-7761-479b-b7eb-9d68c7c70e11 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1515.001206] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2f:35:a5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '61a172ee-af3f-473e-b12a-3fee5bf39c8d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9f9986ae-7761-479b-b7eb-9d68c7c70e11', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1515.011458] env[63379]: DEBUG oslo.service.loopingcall [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 
tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1515.015062] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1515.015562] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fed8ff22-adde-40db-8ffb-beeea7f30cae {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.037109] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1515.037109] env[63379]: value = "task-1779310" [ 1515.037109] env[63379]: _type = "Task" [ 1515.037109] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.048039] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779310, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.265702] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1515.298902] env[63379]: ERROR nova.scheduler.client.report [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [req-46c88636-28fd-4a52-8fe4-3be8e8aeba2b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID cf478c89-515f-4372-b90f-4868ab56e978. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-46c88636-28fd-4a52-8fe4-3be8e8aeba2b"}]} [ 1515.316888] env[63379]: DEBUG nova.scheduler.client.report [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Refreshing inventories for resource provider cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1515.333246] env[63379]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1515.333246] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52351203-4bd2-eca3-4515-eb81f50a0731" [ 1515.333246] env[63379]: _type = "HttpNfcLease" [ 1515.333246] env[63379]: } is ready. 
{{(pid=63379) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1515.336792] env[63379]: DEBUG oslo_vmware.rw_handles [None req-f163ccaa-7de0-4be8-a358-a6c9a5f91e08 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1515.336792] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52351203-4bd2-eca3-4515-eb81f50a0731" [ 1515.336792] env[63379]: _type = "HttpNfcLease" [ 1515.336792] env[63379]: }. {{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1515.337202] env[63379]: DEBUG oslo_vmware.api [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779307, 'name': Rename_Task, 'duration_secs': 0.165699} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.338271] env[63379]: DEBUG nova.scheduler.client.report [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Updating ProviderTree inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1515.338490] env[63379]: DEBUG nova.compute.provider_tree [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1515.341629] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8183f31d-63b6-4267-adc2-e2e1eaf955e8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.344983] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1515.347864] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-19b937ad-b34a-4f32-8696-be2df40484b7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.355871] env[63379]: DEBUG oslo_vmware.rw_handles [None 
req-f163ccaa-7de0-4be8-a358-a6c9a5f91e08 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5252087a-4c3d-8cc0-e5f6-c04032804835/disk-0.vmdk from lease info. {{(pid=63379) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1515.356139] env[63379]: DEBUG oslo_vmware.rw_handles [None req-f163ccaa-7de0-4be8-a358-a6c9a5f91e08 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5252087a-4c3d-8cc0-e5f6-c04032804835/disk-0.vmdk for reading. {{(pid=63379) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1515.358715] env[63379]: DEBUG oslo_vmware.api [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1515.358715] env[63379]: value = "task-1779311" [ 1515.358715] env[63379]: _type = "Task" [ 1515.358715] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.359631] env[63379]: DEBUG nova.scheduler.client.report [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Refreshing aggregate associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, aggregates: None {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1515.432291] env[63379]: DEBUG nova.scheduler.client.report [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Refreshing trait associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1515.444360] env[63379]: DEBUG oslo_vmware.api [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779311, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.451096] env[63379]: DEBUG oslo_vmware.api [None req-9aba89fe-325c-4517-96cf-d2c002d94892 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': task-1779309, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.319012} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.451380] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-9aba89fe-325c-4517-96cf-d2c002d94892 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1515.451571] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9aba89fe-325c-4517-96cf-d2c002d94892 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1515.451778] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9aba89fe-325c-4517-96cf-d2c002d94892 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1515.451959] env[63379]: INFO nova.compute.manager [None req-9aba89fe-325c-4517-96cf-d2c002d94892 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Took 1.25 seconds to destroy the instance on the hypervisor. [ 1515.452487] env[63379]: DEBUG oslo.service.loopingcall [None req-9aba89fe-325c-4517-96cf-d2c002d94892 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1515.452487] env[63379]: DEBUG nova.compute.manager [-] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1515.452627] env[63379]: DEBUG nova.network.neutron [-] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1515.513012] env[63379]: DEBUG nova.network.neutron [req-b3326c16-c911-44cb-a407-1727077e5a09 req-69149416-70a2-4f2e-8f8d-555490ddf446 service nova] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Updated VIF entry in instance network info cache for port 9f9986ae-7761-479b-b7eb-9d68c7c70e11. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1515.513422] env[63379]: DEBUG nova.network.neutron [req-b3326c16-c911-44cb-a407-1727077e5a09 req-69149416-70a2-4f2e-8f8d-555490ddf446 service nova] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Updating instance_info_cache with network_info: [{"id": "9f9986ae-7761-479b-b7eb-9d68c7c70e11", "address": "fa:16:3e:2f:35:a5", "network": {"id": "4d473417-d5ca-4b8c-bb54-df6a37cef0bf", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-2006058150-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dce3d2c2429642ee92f4bb7e53b0a128", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61a172ee-af3f-473e-b12a-3fee5bf39c8d", "external-id": "nsx-vlan-transportzone-997", "segmentation_id": 997, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f9986ae-77", "ovs_interfaceid": "9f9986ae-7761-479b-b7eb-9d68c7c70e11", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1515.518336] env[63379]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-ae41110f-5075-4700-a231-a421eab824cf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.561952] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779310, 'name': CreateVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.874902] env[63379]: DEBUG oslo_vmware.api [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779311, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.016410] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aacfbd52-9e25-4d1f-8c2a-677c52f8cb16 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.019621] env[63379]: DEBUG oslo_concurrency.lockutils [req-b3326c16-c911-44cb-a407-1727077e5a09 req-69149416-70a2-4f2e-8f8d-555490ddf446 service nova] Releasing lock "refresh_cache-2f98800d-800f-4ad7-bd65-f12879f02ce5" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1516.025700] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-739500de-8b26-4a9e-a896-1eae11cbb9c4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.063573] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f87e3e8f-eee2-45d1-be18-027c169479df {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.073431] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779310, 'name': CreateVM_Task, 'duration_secs': 0.573003} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1516.075885] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1516.076914] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1516.077303] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1516.077759] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1516.079344] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4994bc17-7db9-45d1-b60d-6d252a49e908 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.083616] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2fe613bb-4421-4b5b-bc48-1f998ba6cccc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.090201] env[63379]: DEBUG oslo_vmware.api [None 
req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for the task: (returnval){ [ 1516.090201] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c85a74-c732-3cff-7f6e-616c99236c75" [ 1516.090201] env[63379]: _type = "Task" [ 1516.090201] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1516.101608] env[63379]: DEBUG nova.compute.provider_tree [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1516.114079] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c85a74-c732-3cff-7f6e-616c99236c75, 'name': SearchDatastore_Task, 'duration_secs': 0.013255} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1516.116110] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1516.116110] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1516.116110] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1516.116110] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1516.116576] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1516.117285] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f3636f00-f87b-4d4d-893a-511957ab888a {{(pid=63379) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.129637] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1516.130208] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1516.131446] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2673531-4a87-4b29-b97f-23be4d3a2f26 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.139054] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for the task: (returnval){ [ 1516.139054] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52da41cc-50ed-aa0b-7ddf-575393ff4ecf" [ 1516.139054] env[63379]: _type = "Task" [ 1516.139054] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1516.149227] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52da41cc-50ed-aa0b-7ddf-575393ff4ecf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.376064] env[63379]: DEBUG oslo_vmware.api [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779311, 'name': PowerOnVM_Task, 'duration_secs': 0.530777} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1516.377045] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1516.377332] env[63379]: INFO nova.compute.manager [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Took 8.86 seconds to spawn the instance on the hypervisor. 
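The polling pattern visible above (task-1779311 PowerOnVM_Task reported at 66%, then 100%, then "completed successfully", followed by "Powered on the VM") is oslo.vmware's wait_for_task loop driving an asynchronous vSphere task. A minimal sketch of that call pattern, assuming placeholder vCenter credentials and a vm_ref obtained elsewhere (VC_HOST, VC_USER and VC_PASS are illustrative values, not taken from this log):

    from oslo_vmware import api as vmware_api

    # Placeholder credentials -- not the ones used in this deployment.
    session = vmware_api.VMwareAPISession(
        'VC_HOST', 'VC_USER', 'VC_PASS',
        api_retry_count=10, task_poll_interval=0.5)

    def power_on(vm_ref):
        # Start the asynchronous vSphere task, then block while
        # oslo.vmware polls it (the "progress is NN%" records above).
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        session.wait_for_task(task)

wait_for_task raises if the task ends in an error state, which is why a successful run like this one only ever shows progress polling followed by "completed successfully".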
[ 1516.377581] env[63379]: DEBUG nova.compute.manager [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1516.379427] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c24d905-2ce3-4eee-8ef2-53f21e8eff02 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.608737] env[63379]: DEBUG nova.scheduler.client.report [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1516.653511] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52da41cc-50ed-aa0b-7ddf-575393ff4ecf, 'name': SearchDatastore_Task, 'duration_secs': 0.02501} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1516.654537] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96c86400-e55f-4502-b991-c271cba34af4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.663132] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for the task: (returnval){ [ 1516.663132] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5289da41-3345-d8c2-0a75-2cc7a55854c9" [ 1516.663132] env[63379]: _type = "Task" [ 1516.663132] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1516.672666] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5289da41-3345-d8c2-0a75-2cc7a55854c9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.739116] env[63379]: DEBUG nova.compute.manager [req-92e12ae7-af18-49af-ba38-21293bf44622 req-5a4c8506-2ded-4fba-b915-b09f3a72d3b2 service nova] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Received event network-vif-deleted-1913f18a-c402-444f-bfec-50a3ab88167d {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1516.739473] env[63379]: INFO nova.compute.manager [req-92e12ae7-af18-49af-ba38-21293bf44622 req-5a4c8506-2ded-4fba-b915-b09f3a72d3b2 service nova] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Neutron deleted interface 1913f18a-c402-444f-bfec-50a3ab88167d; detaching it from the instance and deleting it from the info cache [ 1516.739599] env[63379]: DEBUG nova.network.neutron [req-92e12ae7-af18-49af-ba38-21293bf44622 req-5a4c8506-2ded-4fba-b915-b09f3a72d3b2 service nova] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1516.817455] env[63379]: DEBUG nova.network.neutron [-] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1516.905790] env[63379]: INFO nova.compute.manager [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Took 31.22 seconds to build instance. [ 1517.115964] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.723s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1517.116556] env[63379]: DEBUG nova.compute.manager [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1517.119703] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c3182f00-aa4c-4c66-93f9-b2195935bd42 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.707s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1517.119943] env[63379]: DEBUG nova.objects.instance [None req-c3182f00-aa4c-4c66-93f9-b2195935bd42 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Lazy-loading 'resources' on Instance uuid bc7baa1a-f65d-41d4-ad86-de041fbb2306 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1517.192063] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5289da41-3345-d8c2-0a75-2cc7a55854c9, 'name': SearchDatastore_Task, 'duration_secs': 0.021123} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.192600] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1517.192897] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 2f98800d-800f-4ad7-bd65-f12879f02ce5/2f98800d-800f-4ad7-bd65-f12879f02ce5.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1517.193288] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9015b5c1-290a-43a0-88e9-59f78ec6bf1e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.205101] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for the task: (returnval){ [ 1517.205101] env[63379]: value = "task-1779312" [ 1517.205101] env[63379]: _type = "Task" [ 1517.205101] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.214109] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779312, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.242331] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6145b9c5-e65b-4e10-9f09-b47d09bea051 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.254021] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdfd55f3-46bd-4849-ae45-4f0609f6c0f4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.299362] env[63379]: DEBUG nova.compute.manager [req-92e12ae7-af18-49af-ba38-21293bf44622 req-5a4c8506-2ded-4fba-b915-b09f3a72d3b2 service nova] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Detach interface failed, port_id=1913f18a-c402-444f-bfec-50a3ab88167d, reason: Instance bf0dd3cf-684c-4378-a89c-5b9f16df062d could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 1517.324668] env[63379]: INFO nova.compute.manager [-] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Took 1.87 seconds to deallocate network for instance. [ 1517.409052] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c63acfb7-a909-4d1a-a7cb-f9fa26072461 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Lock "0324da80-b97c-4dc9-9083-199fbda60341" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.881s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1517.623407] env[63379]: DEBUG nova.compute.utils [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1517.628196] env[63379]: DEBUG nova.compute.manager [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1517.628390] env[63379]: DEBUG nova.network.neutron [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1517.723088] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779312, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.745124] env[63379]: DEBUG nova.policy [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'db6bd541e63b47e29e5c02fc02f162c8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dce3d2c2429642ee92f4bb7e53b0a128', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1517.834905] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9aba89fe-325c-4517-96cf-d2c002d94892 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1517.915587] env[63379]: DEBUG nova.compute.manager [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1518.130842] env[63379]: DEBUG nova.compute.manager [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1518.216183] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779312, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.716377} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.216566] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 2f98800d-800f-4ad7-bd65-f12879f02ce5/2f98800d-800f-4ad7-bd65-f12879f02ce5.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1518.216814] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1518.218924] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-20b1cc53-924d-491d-94af-64bc0f942df5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.234422] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for the task: (returnval){ [ 1518.234422] env[63379]: value = "task-1779313" [ 1518.234422] env[63379]: _type = "Task" [ 1518.234422] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.244675] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779313, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.274906] env[63379]: DEBUG nova.network.neutron [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Successfully created port: 6ffb0ce3-d1f7-4b60-b7be-bb0bd060175d {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1518.315148] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6953746-b559-4625-b613-d070130156a2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.329229] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94806029-0616-4008-bc73-3a3c28a40e5f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.371548] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-571a26e8-1a9a-481f-9627-61684c00d1fe {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.380288] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d72950c5-04a8-4e8d-9853-2eb2bc7c269d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.397138] env[63379]: DEBUG nova.compute.provider_tree [None req-c3182f00-aa4c-4c66-93f9-b2195935bd42 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1518.438508] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1518.749366] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779313, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.901139] env[63379]: DEBUG nova.scheduler.client.report [None req-c3182f00-aa4c-4c66-93f9-b2195935bd42 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1518.990693] env[63379]: DEBUG oslo_concurrency.lockutils [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquiring lock "ec1f7a44-7344-43fb-9d51-688731d8ce14" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1518.990693] env[63379]: DEBUG oslo_concurrency.lockutils [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Lock "ec1f7a44-7344-43fb-9d51-688731d8ce14" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1519.142332] env[63379]: DEBUG nova.compute.manager [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1519.174569] env[63379]: DEBUG nova.virt.hardware [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1519.174850] env[63379]: DEBUG nova.virt.hardware [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1519.175090] env[63379]: DEBUG nova.virt.hardware [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1519.175301] env[63379]: DEBUG nova.virt.hardware [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1519.175456] env[63379]: DEBUG nova.virt.hardware [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1519.175609] env[63379]: DEBUG nova.virt.hardware [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1519.175821] env[63379]: DEBUG nova.virt.hardware [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1519.175986] env[63379]: DEBUG nova.virt.hardware [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1519.176176] env[63379]: DEBUG 
nova.virt.hardware [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1519.176342] env[63379]: DEBUG nova.virt.hardware [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1519.177493] env[63379]: DEBUG nova.virt.hardware [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1519.177493] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1253591-bcb7-46e5-9f40-e15ba0efd42a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.191799] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce45d121-35c1-4e01-ab66-e8ab5a6c4e23 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.244732] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779313, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.801961} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1519.246074] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1519.246074] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a336ab7f-553b-479f-a52a-ca5cd39353e6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.269763] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Reconfiguring VM instance instance-00000029 to attach disk [datastore1] 2f98800d-800f-4ad7-bd65-f12879f02ce5/2f98800d-800f-4ad7-bd65-f12879f02ce5.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1519.270118] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-086dc2d3-b9cb-44c4-9c47-02dc970bbe58 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.291811] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for the task: (returnval){ [ 1519.291811] env[63379]: value = "task-1779314" [ 1519.291811] env[63379]: _type = "Task" [ 1519.291811] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1519.301015] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779314, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.406428] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c3182f00-aa4c-4c66-93f9-b2195935bd42 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.287s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1519.408980] env[63379]: DEBUG oslo_concurrency.lockutils [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.661s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1519.410566] env[63379]: INFO nova.compute.claims [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1519.435087] env[63379]: INFO nova.scheduler.client.report [None req-c3182f00-aa4c-4c66-93f9-b2195935bd42 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Deleted allocations for instance bc7baa1a-f65d-41d4-ad86-de041fbb2306 [ 1519.803982] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779314, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.839717] env[63379]: DEBUG nova.compute.manager [req-77908d1f-510b-4414-a85c-e8b03381b338 req-63b25ad9-cda6-4e2c-8b78-fe314cba5222 service nova] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Received event network-vif-plugged-6ffb0ce3-d1f7-4b60-b7be-bb0bd060175d {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1519.839717] env[63379]: DEBUG oslo_concurrency.lockutils [req-77908d1f-510b-4414-a85c-e8b03381b338 req-63b25ad9-cda6-4e2c-8b78-fe314cba5222 service nova] Acquiring lock "e838f54f-99f2-4f39-a9d2-725be8a5b3ce-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1519.839717] env[63379]: DEBUG oslo_concurrency.lockutils [req-77908d1f-510b-4414-a85c-e8b03381b338 req-63b25ad9-cda6-4e2c-8b78-fe314cba5222 service nova] Lock "e838f54f-99f2-4f39-a9d2-725be8a5b3ce-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1519.839717] env[63379]: DEBUG oslo_concurrency.lockutils [req-77908d1f-510b-4414-a85c-e8b03381b338 req-63b25ad9-cda6-4e2c-8b78-fe314cba5222 service nova] Lock "e838f54f-99f2-4f39-a9d2-725be8a5b3ce-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1519.839717] env[63379]: DEBUG nova.compute.manager [req-77908d1f-510b-4414-a85c-e8b03381b338 req-63b25ad9-cda6-4e2c-8b78-fe314cba5222 service nova] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] No waiting events found dispatching network-vif-plugged-6ffb0ce3-d1f7-4b60-b7be-bb0bd060175d {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1519.839717] env[63379]: WARNING nova.compute.manager [req-77908d1f-510b-4414-a85c-e8b03381b338 req-63b25ad9-cda6-4e2c-8b78-fe314cba5222 service nova] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Received unexpected event network-vif-plugged-6ffb0ce3-d1f7-4b60-b7be-bb0bd060175d for instance with vm_state building and task_state spawning. [ 1519.947612] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c3182f00-aa4c-4c66-93f9-b2195935bd42 tempest-ServerGroupTestJSON-1784230594 tempest-ServerGroupTestJSON-1784230594-project-member] Lock "bc7baa1a-f65d-41d4-ad86-de041fbb2306" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.495s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1520.012765] env[63379]: DEBUG nova.network.neutron [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Successfully updated port: 6ffb0ce3-d1f7-4b60-b7be-bb0bd060175d {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1520.302885] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779314, 'name': ReconfigVM_Task, 'duration_secs': 0.578598} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1520.303216] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Reconfigured VM instance instance-00000029 to attach disk [datastore1] 2f98800d-800f-4ad7-bd65-f12879f02ce5/2f98800d-800f-4ad7-bd65-f12879f02ce5.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1520.303837] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-22433087-844e-4731-9629-5aadf4995b05 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.311900] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for the task: (returnval){ [ 1520.311900] env[63379]: value = "task-1779315" [ 1520.311900] env[63379]: _type = "Task" [ 1520.311900] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1520.320623] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779315, 'name': Rename_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.516030] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquiring lock "refresh_cache-e838f54f-99f2-4f39-a9d2-725be8a5b3ce" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1520.516244] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquired lock "refresh_cache-e838f54f-99f2-4f39-a9d2-725be8a5b3ce" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1520.516446] env[63379]: DEBUG nova.network.neutron [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1520.825121] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779315, 'name': Rename_Task, 'duration_secs': 0.182801} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1520.825121] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1520.825121] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-95de25d9-795f-46dd-b130-df9fd70ac5a1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.835942] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for the task: (returnval){ [ 1520.835942] env[63379]: value = "task-1779316" [ 1520.835942] env[63379]: _type = "Task" [ 1520.835942] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1520.851775] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779316, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.969924] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d362fc4a-d0c7-4779-b982-ad015c6ea38c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.978243] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-717f3dba-7dd0-4ebe-b5bf-bd65e3273da0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.010058] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-746caf8b-06e3-48e4-95b6-ad0d4255cbbc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.018249] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ade599db-63e9-41a5-94fd-9e2132efcdd0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.035293] env[63379]: DEBUG nova.compute.provider_tree [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1521.083192] env[63379]: DEBUG nova.network.neutron [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Instance cache missing network info. 
{{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1521.328455] env[63379]: DEBUG nova.network.neutron [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Updating instance_info_cache with network_info: [{"id": "6ffb0ce3-d1f7-4b60-b7be-bb0bd060175d", "address": "fa:16:3e:52:20:a0", "network": {"id": "4d473417-d5ca-4b8c-bb54-df6a37cef0bf", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-2006058150-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dce3d2c2429642ee92f4bb7e53b0a128", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61a172ee-af3f-473e-b12a-3fee5bf39c8d", "external-id": "nsx-vlan-transportzone-997", "segmentation_id": 997, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ffb0ce3-d1", "ovs_interfaceid": "6ffb0ce3-d1f7-4b60-b7be-bb0bd060175d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1521.350231] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779316, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.541075] env[63379]: DEBUG nova.scheduler.client.report [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1521.834018] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Releasing lock "refresh_cache-e838f54f-99f2-4f39-a9d2-725be8a5b3ce" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1521.834018] env[63379]: DEBUG nova.compute.manager [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Instance network_info: |[{"id": "6ffb0ce3-d1f7-4b60-b7be-bb0bd060175d", "address": "fa:16:3e:52:20:a0", "network": {"id": "4d473417-d5ca-4b8c-bb54-df6a37cef0bf", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-2006058150-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dce3d2c2429642ee92f4bb7e53b0a128", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61a172ee-af3f-473e-b12a-3fee5bf39c8d", "external-id": "nsx-vlan-transportzone-997", "segmentation_id": 997, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ffb0ce3-d1", "ovs_interfaceid": "6ffb0ce3-d1f7-4b60-b7be-bb0bd060175d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1521.834018] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:52:20:a0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '61a172ee-af3f-473e-b12a-3fee5bf39c8d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6ffb0ce3-d1f7-4b60-b7be-bb0bd060175d', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1521.843613] env[63379]: DEBUG oslo.service.loopingcall [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 
tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1521.844936] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1521.849483] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3db3f05b-d455-4eaa-b9e7-300286b6108a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.874012] env[63379]: DEBUG nova.compute.manager [req-986fd647-8e17-4896-aa9b-0bb05a06de10 req-2fb5550d-8705-4e27-a249-de28a22dbe62 service nova] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Received event network-changed-6ffb0ce3-d1f7-4b60-b7be-bb0bd060175d {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1521.874012] env[63379]: DEBUG nova.compute.manager [req-986fd647-8e17-4896-aa9b-0bb05a06de10 req-2fb5550d-8705-4e27-a249-de28a22dbe62 service nova] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Refreshing instance network info cache due to event network-changed-6ffb0ce3-d1f7-4b60-b7be-bb0bd060175d. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1521.874012] env[63379]: DEBUG oslo_concurrency.lockutils [req-986fd647-8e17-4896-aa9b-0bb05a06de10 req-2fb5550d-8705-4e27-a249-de28a22dbe62 service nova] Acquiring lock "refresh_cache-e838f54f-99f2-4f39-a9d2-725be8a5b3ce" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1521.874197] env[63379]: DEBUG oslo_concurrency.lockutils [req-986fd647-8e17-4896-aa9b-0bb05a06de10 req-2fb5550d-8705-4e27-a249-de28a22dbe62 service nova] Acquired lock "refresh_cache-e838f54f-99f2-4f39-a9d2-725be8a5b3ce" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1521.875147] env[63379]: DEBUG nova.network.neutron [req-986fd647-8e17-4896-aa9b-0bb05a06de10 req-2fb5550d-8705-4e27-a249-de28a22dbe62 service nova] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Refreshing network info cache for port 6ffb0ce3-d1f7-4b60-b7be-bb0bd060175d {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1521.883589] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779316, 'name': PowerOnVM_Task, 'duration_secs': 0.586594} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.885416] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1521.885702] env[63379]: INFO nova.compute.manager [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Took 9.53 seconds to spawn the instance on the hypervisor. [ 1521.885916] env[63379]: DEBUG nova.compute.manager [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1521.886268] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1521.886268] env[63379]: value = "task-1779317" [ 1521.886268] env[63379]: _type = "Task" [ 1521.886268] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.890326] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f88c0714-4d28-4d1f-8dfd-e7dbdb6749d3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.901981] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779317, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.050012] env[63379]: DEBUG oslo_concurrency.lockutils [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.641s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1522.051559] env[63379]: DEBUG nova.compute.manager [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1522.057032] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.862s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1522.057032] env[63379]: INFO nova.compute.claims [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1522.404602] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779317, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.421091] env[63379]: INFO nova.compute.manager [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Took 31.61 seconds to build instance. [ 1522.561493] env[63379]: DEBUG nova.compute.utils [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1522.563027] env[63379]: DEBUG nova.compute.manager [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1522.563279] env[63379]: DEBUG nova.network.neutron [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1522.623996] env[63379]: DEBUG nova.policy [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e877d627a40b414c8175c9ebd8b5310b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2618e088eb0b4a16882dcd26273ed7c6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1522.908770] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779317, 'name': CreateVM_Task, 'duration_secs': 0.815767} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.909596] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1522.910443] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1522.910640] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1522.911068] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1522.911357] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa6071c3-e213-44e6-92f3-22144fae57f8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.919141] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for the task: (returnval){ [ 1522.919141] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e2ebe7-c6bc-51d8-ce3a-b75f5fdc49a1" [ 1522.919141] env[63379]: _type = "Task" [ 1522.919141] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.923697] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Lock "2f98800d-800f-4ad7-bd65-f12879f02ce5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.371s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1522.931702] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e2ebe7-c6bc-51d8-ce3a-b75f5fdc49a1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.058328] env[63379]: DEBUG nova.network.neutron [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Successfully created port: 413ced3b-b54d-4b64-93a1-7a9b2b9857fc {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1523.070222] env[63379]: DEBUG nova.compute.manager [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1523.080571] env[63379]: DEBUG nova.network.neutron [req-986fd647-8e17-4896-aa9b-0bb05a06de10 req-2fb5550d-8705-4e27-a249-de28a22dbe62 service nova] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Updated VIF entry in instance network info cache for port 6ffb0ce3-d1f7-4b60-b7be-bb0bd060175d. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1523.080789] env[63379]: DEBUG nova.network.neutron [req-986fd647-8e17-4896-aa9b-0bb05a06de10 req-2fb5550d-8705-4e27-a249-de28a22dbe62 service nova] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Updating instance_info_cache with network_info: [{"id": "6ffb0ce3-d1f7-4b60-b7be-bb0bd060175d", "address": "fa:16:3e:52:20:a0", "network": {"id": "4d473417-d5ca-4b8c-bb54-df6a37cef0bf", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-2006058150-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dce3d2c2429642ee92f4bb7e53b0a128", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61a172ee-af3f-473e-b12a-3fee5bf39c8d", "external-id": "nsx-vlan-transportzone-997", "segmentation_id": 997, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ffb0ce3-d1", "ovs_interfaceid": "6ffb0ce3-d1f7-4b60-b7be-bb0bd060175d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1523.431487] env[63379]: DEBUG nova.compute.manager [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1523.436805] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e2ebe7-c6bc-51d8-ce3a-b75f5fdc49a1, 'name': SearchDatastore_Task, 'duration_secs': 0.015217} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1523.440133] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1523.440313] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1523.440573] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1523.440688] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1523.440870] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1523.441381] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ff53e963-73cc-4716-999c-e30a7979caad {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.452655] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1523.452946] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1523.453713] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-78793d8f-2f51-4f46-8705-8c46478bb5e6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.461296] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for the task: (returnval){ [ 1523.461296] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]529e9573-ada2-54cc-1313-eb4b40046c0f" [ 1523.461296] env[63379]: _type = "Task" [ 1523.461296] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1523.473269] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]529e9573-ada2-54cc-1313-eb4b40046c0f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.586840] env[63379]: DEBUG oslo_concurrency.lockutils [req-986fd647-8e17-4896-aa9b-0bb05a06de10 req-2fb5550d-8705-4e27-a249-de28a22dbe62 service nova] Releasing lock "refresh_cache-e838f54f-99f2-4f39-a9d2-725be8a5b3ce" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1523.685021] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a16f44f-0ad3-41ef-8bb4-a0c39acfeedf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.692211] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f00ac788-f976-4a6f-9174-d2f3e5c7fdce {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.728442] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f740fb3a-9964-49d3-ab51-065386aff424 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.738956] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a921b338-13f3-45f7-a449-229eaa1a1a1e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.757135] env[63379]: DEBUG nova.compute.provider_tree [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1523.776540] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Acquiring lock "2a996f06-542e-4f71-95a4-0f71097d1478" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" 
{{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1523.776540] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Lock "2a996f06-542e-4f71-95a4-0f71097d1478" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1523.967995] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1523.979756] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]529e9573-ada2-54cc-1313-eb4b40046c0f, 'name': SearchDatastore_Task, 'duration_secs': 0.01454} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1523.980861] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-939c3dec-0ff2-4d44-be16-c25a2b241649 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.988111] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for the task: (returnval){ [ 1523.988111] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5286e9a8-2a8f-1c90-6b30-ee5b12c8a705" [ 1523.988111] env[63379]: _type = "Task" [ 1523.988111] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1523.997448] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5286e9a8-2a8f-1c90-6b30-ee5b12c8a705, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.089398] env[63379]: DEBUG nova.compute.manager [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1524.127611] env[63379]: DEBUG nova.virt.hardware [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1524.127979] env[63379]: DEBUG nova.virt.hardware [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1524.128243] env[63379]: DEBUG nova.virt.hardware [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1524.128544] env[63379]: DEBUG nova.virt.hardware [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1524.128859] env[63379]: DEBUG nova.virt.hardware [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1524.129115] env[63379]: DEBUG nova.virt.hardware [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1524.129417] env[63379]: DEBUG nova.virt.hardware [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1524.129644] env[63379]: DEBUG nova.virt.hardware [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1524.129865] 
env[63379]: DEBUG nova.virt.hardware [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1524.130142] env[63379]: DEBUG nova.virt.hardware [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1524.130429] env[63379]: DEBUG nova.virt.hardware [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1524.131474] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbb27aa9-f0d1-4dae-99ef-bc765d6a4993 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.140658] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5360ee11-bbb3-43a7-9066-3d96a469c609 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.265029] env[63379]: DEBUG nova.scheduler.client.report [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1524.503776] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5286e9a8-2a8f-1c90-6b30-ee5b12c8a705, 'name': SearchDatastore_Task, 'duration_secs': 0.013821} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.505569] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1524.506063] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] e838f54f-99f2-4f39-a9d2-725be8a5b3ce/e838f54f-99f2-4f39-a9d2-725be8a5b3ce.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1524.506500] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b86077cd-7bf5-45cf-a3c2-85b6c8c58eaa {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.517680] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for the task: (returnval){ [ 1524.517680] env[63379]: value = "task-1779318" [ 1524.517680] env[63379]: _type = "Task" [ 1524.517680] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1524.532999] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779318, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.646213] env[63379]: DEBUG nova.compute.manager [req-92febb8f-8c09-446b-a95a-9356437199be req-659e141c-55fb-40e3-86d3-555ad9b05670 service nova] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Received event network-vif-plugged-413ced3b-b54d-4b64-93a1-7a9b2b9857fc {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1524.646445] env[63379]: DEBUG oslo_concurrency.lockutils [req-92febb8f-8c09-446b-a95a-9356437199be req-659e141c-55fb-40e3-86d3-555ad9b05670 service nova] Acquiring lock "c439fe86-fc43-4c05-a4b7-3634a043269a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1524.646665] env[63379]: DEBUG oslo_concurrency.lockutils [req-92febb8f-8c09-446b-a95a-9356437199be req-659e141c-55fb-40e3-86d3-555ad9b05670 service nova] Lock "c439fe86-fc43-4c05-a4b7-3634a043269a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1524.646838] env[63379]: DEBUG oslo_concurrency.lockutils [req-92febb8f-8c09-446b-a95a-9356437199be req-659e141c-55fb-40e3-86d3-555ad9b05670 service nova] Lock "c439fe86-fc43-4c05-a4b7-3634a043269a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1524.647015] env[63379]: DEBUG nova.compute.manager [req-92febb8f-8c09-446b-a95a-9356437199be req-659e141c-55fb-40e3-86d3-555ad9b05670 service nova] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] No waiting events found dispatching network-vif-plugged-413ced3b-b54d-4b64-93a1-7a9b2b9857fc {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1524.647615] env[63379]: WARNING nova.compute.manager [req-92febb8f-8c09-446b-a95a-9356437199be req-659e141c-55fb-40e3-86d3-555ad9b05670 service nova] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Received unexpected event network-vif-plugged-413ced3b-b54d-4b64-93a1-7a9b2b9857fc for instance with vm_state building and task_state spawning. [ 1524.742767] env[63379]: DEBUG nova.network.neutron [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Successfully updated port: 413ced3b-b54d-4b64-93a1-7a9b2b9857fc {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1524.775826] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.723s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1524.776407] env[63379]: DEBUG nova.compute.manager [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1524.779020] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 24.115s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1524.936420] env[63379]: DEBUG oslo_concurrency.lockutils [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Acquiring lock "ac596f08-86a3-42e0-86e6-41a173fe868f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1524.936716] env[63379]: DEBUG oslo_concurrency.lockutils [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Lock "ac596f08-86a3-42e0-86e6-41a173fe868f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1525.029286] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779318, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.246543] env[63379]: DEBUG oslo_concurrency.lockutils [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Acquiring lock "refresh_cache-c439fe86-fc43-4c05-a4b7-3634a043269a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1525.246719] env[63379]: DEBUG oslo_concurrency.lockutils [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Acquired lock "refresh_cache-c439fe86-fc43-4c05-a4b7-3634a043269a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1525.246788] env[63379]: DEBUG nova.network.neutron [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1525.293947] env[63379]: DEBUG nova.compute.utils [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1525.296521] env[63379]: DEBUG nova.compute.manager [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Allocating IP information in the background. 
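The "Acquiring lock" / "acquired by" / '"released" by' entries above are emitted by oslo.concurrency's lockutils wrappers around Nova's critical sections. A minimal sketch of the two usage patterns behind them, assuming only oslo.concurrency is available; the lock names are taken from the log and the function bodies are placeholders:

```python
# Sketch of the oslo.concurrency locking patterns behind the
# "Acquiring lock ... acquired ... released" DEBUG lines above.
from oslo_concurrency import lockutils

# Context-manager form: serializes access to a shared structure within the
# process (external=True would extend this to a cross-process file lock).
with lockutils.lock('compute_resources'):
    pass  # e.g. update the resource tracker's view of the host

# Decorator form: every call to the wrapped function takes the named lock.
@lockutils.synchronized('build_and_run_instance')
def build_instance():
    pass  # e.g. claim resources, plug VIFs, spawn the guest
```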
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1525.296735] env[63379]: DEBUG nova.network.neutron [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1525.359931] env[63379]: DEBUG nova.policy [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ae201bf75acb480196f69cddc0f47523', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e17ea72d033544159bbaea7365a7f221', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1525.529915] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779318, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.761577} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1525.530429] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] e838f54f-99f2-4f39-a9d2-725be8a5b3ce/e838f54f-99f2-4f39-a9d2-725be8a5b3ce.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1525.530745] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1525.531109] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d8219857-85f0-4bdf-97ee-2f380b67c48f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.540735] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for the task: (returnval){ [ 1525.540735] env[63379]: value = "task-1779319" [ 1525.540735] env[63379]: _type = "Task" [ 1525.540735] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1525.553242] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779319, 'name': ExtendVirtualDisk_Task} progress is 0%. 
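The CopyVirtualDisk_Task and ExtendVirtualDisk_Task entries above follow oslo.vmware's invoke-then-wait pattern: a vCenter call returns a task reference, and the session polls it until completion (the "progress is N%" lines). A hedged sketch of that pattern, assuming an already-constructed oslo_vmware.api.VMwareAPISession and pre-resolved managed-object references; the datastore paths are copied from the log and the keyword names follow the vSphere CopyVirtualDisk_Task API:

```python
def copy_image_vmdk(session, disk_manager, datacenter):
    """Copy the cached image VMDK to the instance directory and wait.

    Assumptions: `session` is an oslo_vmware.api.VMwareAPISession;
    `disk_manager` and `datacenter` are managed-object references obtained
    elsewhere. This is a sketch of the pattern, not Nova's vm_util code.
    """
    source = ('[datastore1] devstack-image-cache_base/'
              'd3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/'
              'd3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk')
    dest = ('[datastore1] e838f54f-99f2-4f39-a9d2-725be8a5b3ce/'
            'e838f54f-99f2-4f39-a9d2-725be8a5b3ce.vmdk')
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task',
                              disk_manager,
                              sourceName=source,
                              sourceDatacenter=datacenter,
                              destName=dest)
    # wait_for_task() polls the task (the "progress is N%" DEBUG lines above)
    # and raises if the task finishes in an error state.
    session.wait_for_task(task)
```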
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.767203] env[63379]: DEBUG nova.network.neutron [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Successfully created port: 3c22cde0-746e-43ec-b075-e14c004043c4 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1525.779989] env[63379]: DEBUG nova.network.neutron [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1525.793942] env[63379]: DEBUG oslo_vmware.rw_handles [None req-f163ccaa-7de0-4be8-a358-a6c9a5f91e08 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5252087a-4c3d-8cc0-e5f6-c04032804835/disk-0.vmdk. {{(pid=63379) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1525.794856] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ab4f2d0-a61e-493c-bff9-7c0a23724fe4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.799499] env[63379]: DEBUG nova.compute.manager [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1525.814565] env[63379]: DEBUG oslo_vmware.rw_handles [None req-f163ccaa-7de0-4be8-a358-a6c9a5f91e08 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5252087a-4c3d-8cc0-e5f6-c04032804835/disk-0.vmdk is in state: ready. {{(pid=63379) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1525.814757] env[63379]: ERROR oslo_vmware.rw_handles [None req-f163ccaa-7de0-4be8-a358-a6c9a5f91e08 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5252087a-4c3d-8cc0-e5f6-c04032804835/disk-0.vmdk due to incomplete transfer. [ 1525.815044] env[63379]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-9bbd8236-bacb-4b7a-82f4-d8bcd16a12a5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.828467] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance d47be684-6cd8-45c6-8c6a-9a6db0390f97 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1525.828467] env[63379]: WARNING nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance bf0dd3cf-684c-4378-a89c-5b9f16df062d is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1525.828467] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1525.828467] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 41952d7b-ce23-4e9b-8843-bbac1d3099c1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1525.828467] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1525.828467] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 941ac23c-6aa9-4ed1-840a-326423b7cbc0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1525.828467] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance d2f5b406-3d0e-4150-aeaf-7cdacbc12c06 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1525.828467] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 08465a2c-1ab6-4c53-9b12-3cd51c717b03 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1525.828467] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 915aec20-5765-4aad-8b0f-f2d71b7d6428 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1525.828467] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 04234ba7-24a3-48e5-9f62-6f4dddd0054a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1525.828467] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 90f0c97d-695b-4975-8ab9-4e77a9175da1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1525.828467] env[63379]: WARNING nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 07cc8cd7-8368-41dd-ae13-01c8275cac9e is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1525.828467] env[63379]: WARNING nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance a78feafb-00bc-44c4-acd3-a36fb8a81767 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1525.829568] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1525.829568] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance ee36cc5f-61a1-4e4f-9cae-670f5868d90c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1525.829568] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance aa44a4ff-14e5-42d2-a082-06fe0ae9646c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1525.829568] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance aedff32b-b0c2-4a93-a2c6-349d26839cc4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1525.829733] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 758ade2c-7f75-4907-95d5-681d5792ae31 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1525.829810] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 0324da80-b97c-4dc9-9083-199fbda60341 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1525.829919] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 2f98800d-800f-4ad7-bd65-f12879f02ce5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1525.830072] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance e838f54f-99f2-4f39-a9d2-725be8a5b3ce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1525.830165] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance c439fe86-fc43-4c05-a4b7-3634a043269a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1525.830281] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 318355e9-b4cc-4645-ac51-b583d14e1134 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1525.837132] env[63379]: DEBUG oslo_vmware.rw_handles [None req-f163ccaa-7de0-4be8-a358-a6c9a5f91e08 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5252087a-4c3d-8cc0-e5f6-c04032804835/disk-0.vmdk. 
{{(pid=63379) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1525.837359] env[63379]: DEBUG nova.virt.vmwareapi.images [None req-f163ccaa-7de0-4be8-a358-a6c9a5f91e08 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Uploaded image 23b4b1bf-c822-4cda-95d9-40f3297d68b6 to the Glance image server {{(pid=63379) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1525.839403] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f163ccaa-7de0-4be8-a358-a6c9a5f91e08 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Destroying the VM {{(pid=63379) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1525.840215] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-01e122a0-f1ec-4a0b-ab9d-0dbff5b9cc23 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.848863] env[63379]: DEBUG oslo_vmware.api [None req-f163ccaa-7de0-4be8-a358-a6c9a5f91e08 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Waiting for the task: (returnval){ [ 1525.848863] env[63379]: value = "task-1779320" [ 1525.848863] env[63379]: _type = "Task" [ 1525.848863] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1525.860485] env[63379]: DEBUG oslo_vmware.api [None req-f163ccaa-7de0-4be8-a358-a6c9a5f91e08 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779320, 'name': Destroy_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.988259] env[63379]: DEBUG nova.network.neutron [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Updating instance_info_cache with network_info: [{"id": "413ced3b-b54d-4b64-93a1-7a9b2b9857fc", "address": "fa:16:3e:4b:4c:7d", "network": {"id": "971209d3-a9b9-4857-964a-59fe7ffa394e", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1103991925-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2618e088eb0b4a16882dcd26273ed7c6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a69ed1dd-213a-4e30-992a-466735188bf6", "external-id": "nsx-vlan-transportzone-102", "segmentation_id": 102, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap413ced3b-b5", "ovs_interfaceid": "413ced3b-b54d-4b64-93a1-7a9b2b9857fc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1526.052628] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779319, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.115793} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.053033] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1526.053708] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f86a5ae-b59c-4807-8694-4ec76329cb4f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.076221] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Reconfiguring VM instance instance-0000002a to attach disk [datastore1] e838f54f-99f2-4f39-a9d2-725be8a5b3ce/e838f54f-99f2-4f39-a9d2-725be8a5b3ce.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1526.076859] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2e526eb8-730c-4206-abf0-19141fd72955 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.101509] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for the task: (returnval){ [ 1526.101509] env[63379]: value = "task-1779321" [ 1526.101509] env[63379]: _type = "Task" [ 1526.101509] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.111876] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779321, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.340749] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 650d4709-3cbc-4b9a-b165-66fa0af97c4d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1526.361243] env[63379]: DEBUG oslo_vmware.api [None req-f163ccaa-7de0-4be8-a358-a6c9a5f91e08 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779320, 'name': Destroy_Task, 'duration_secs': 0.368111} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.361608] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-f163ccaa-7de0-4be8-a358-a6c9a5f91e08 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Destroyed the VM [ 1526.361868] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f163ccaa-7de0-4be8-a358-a6c9a5f91e08 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Deleting Snapshot of the VM instance {{(pid=63379) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1526.362151] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-b541bee8-3110-4067-a303-6ca19ff651e5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.370971] env[63379]: DEBUG oslo_vmware.api [None req-f163ccaa-7de0-4be8-a358-a6c9a5f91e08 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Waiting for the task: (returnval){ [ 1526.370971] env[63379]: value = "task-1779322" [ 1526.370971] env[63379]: _type = "Task" [ 1526.370971] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.379626] env[63379]: DEBUG oslo_vmware.api [None req-f163ccaa-7de0-4be8-a358-a6c9a5f91e08 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779322, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.494099] env[63379]: DEBUG oslo_concurrency.lockutils [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Releasing lock "refresh_cache-c439fe86-fc43-4c05-a4b7-3634a043269a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1526.494463] env[63379]: DEBUG nova.compute.manager [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Instance network_info: |[{"id": "413ced3b-b54d-4b64-93a1-7a9b2b9857fc", "address": "fa:16:3e:4b:4c:7d", "network": {"id": "971209d3-a9b9-4857-964a-59fe7ffa394e", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1103991925-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2618e088eb0b4a16882dcd26273ed7c6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a69ed1dd-213a-4e30-992a-466735188bf6", "external-id": "nsx-vlan-transportzone-102", "segmentation_id": 102, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap413ced3b-b5", "ovs_interfaceid": "413ced3b-b54d-4b64-93a1-7a9b2b9857fc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1526.494913] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4b:4c:7d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a69ed1dd-213a-4e30-992a-466735188bf6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '413ced3b-b54d-4b64-93a1-7a9b2b9857fc', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1526.502530] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Creating folder: Project (2618e088eb0b4a16882dcd26273ed7c6). Parent ref: group-v369214. 
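The network_info blob cached above is a list of VIF dictionaries (port id, MAC address, network with subnets and fixed IPs, and driver binding details). A small sketch that extracts the fields reused in the "Instance VIF info" entry; the literal below is abbreviated from the logged cache entry:

```python
# Sketch: pulling the VIF fields out of a network_info structure shaped
# like the one logged above (abbreviated here for readability).
network_info = [{
    "id": "413ced3b-b54d-4b64-93a1-7a9b2b9857fc",
    "address": "fa:16:3e:4b:4c:7d",
    "network": {
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.11", "type": "fixed"}],
        }],
    },
    "details": {"nsx-logical-switch-id": "a69ed1dd-213a-4e30-992a-466735188bf6"},
    "vnic_type": "normal",
}]

for vif in network_info:
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"] if ip["type"] == "fixed"]
    print(vif["id"], vif["address"],
          vif["details"]["nsx-logical-switch-id"], fixed_ips)
# 413ced3b-... fa:16:3e:4b:4c:7d a69ed1dd-... ['192.168.128.11']
```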
{{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1526.502800] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8ec71db1-39c4-48de-b067-7e21d1691f45 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.514800] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Created folder: Project (2618e088eb0b4a16882dcd26273ed7c6) in parent group-v369214. [ 1526.514800] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Creating folder: Instances. Parent ref: group-v369339. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1526.515187] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-895a3611-bcb7-4958-9647-643d1fb91a2a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.525314] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Created folder: Instances in parent group-v369339. [ 1526.525576] env[63379]: DEBUG oslo.service.loopingcall [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1526.525776] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1526.525988] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-138d9ce3-0835-4ed6-acc1-ab41a172227a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.545084] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1526.545084] env[63379]: value = "task-1779325" [ 1526.545084] env[63379]: _type = "Task" [ 1526.545084] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.552936] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779325, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.611294] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779321, 'name': ReconfigVM_Task, 'duration_secs': 0.303551} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.611581] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Reconfigured VM instance instance-0000002a to attach disk [datastore1] e838f54f-99f2-4f39-a9d2-725be8a5b3ce/e838f54f-99f2-4f39-a9d2-725be8a5b3ce.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1526.612331] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2947cc69-dc8d-461a-b924-7b8ed012fa4e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.621453] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for the task: (returnval){ [ 1526.621453] env[63379]: value = "task-1779326" [ 1526.621453] env[63379]: _type = "Task" [ 1526.621453] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.630159] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779326, 'name': Rename_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.672027] env[63379]: DEBUG nova.compute.manager [req-673037eb-3cd3-4247-a9de-fca38a9a1fcb req-311a8312-92ab-4812-8b26-6a28f9e68777 service nova] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Received event network-changed-413ced3b-b54d-4b64-93a1-7a9b2b9857fc {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1526.672244] env[63379]: DEBUG nova.compute.manager [req-673037eb-3cd3-4247-a9de-fca38a9a1fcb req-311a8312-92ab-4812-8b26-6a28f9e68777 service nova] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Refreshing instance network info cache due to event network-changed-413ced3b-b54d-4b64-93a1-7a9b2b9857fc. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1526.672480] env[63379]: DEBUG oslo_concurrency.lockutils [req-673037eb-3cd3-4247-a9de-fca38a9a1fcb req-311a8312-92ab-4812-8b26-6a28f9e68777 service nova] Acquiring lock "refresh_cache-c439fe86-fc43-4c05-a4b7-3634a043269a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1526.672629] env[63379]: DEBUG oslo_concurrency.lockutils [req-673037eb-3cd3-4247-a9de-fca38a9a1fcb req-311a8312-92ab-4812-8b26-6a28f9e68777 service nova] Acquired lock "refresh_cache-c439fe86-fc43-4c05-a4b7-3634a043269a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1526.672792] env[63379]: DEBUG nova.network.neutron [req-673037eb-3cd3-4247-a9de-fca38a9a1fcb req-311a8312-92ab-4812-8b26-6a28f9e68777 service nova] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Refreshing network info cache for port 413ced3b-b54d-4b64-93a1-7a9b2b9857fc {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1526.817130] env[63379]: DEBUG nova.compute.manager [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1526.841816] env[63379]: DEBUG nova.virt.hardware [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1526.842127] env[63379]: DEBUG nova.virt.hardware [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1526.842302] env[63379]: DEBUG nova.virt.hardware [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1526.842502] env[63379]: DEBUG nova.virt.hardware [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 1526.842648] env[63379]: DEBUG nova.virt.hardware [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1526.842802] env[63379]: DEBUG nova.virt.hardware [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1526.843088] env[63379]: DEBUG nova.virt.hardware [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1526.843270] env[63379]: DEBUG nova.virt.hardware [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1526.843448] env[63379]: DEBUG nova.virt.hardware [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1526.843719] env[63379]: DEBUG nova.virt.hardware [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1526.843845] env[63379]: DEBUG nova.virt.hardware [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1526.844632] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 5aa36799-251b-4933-8ccd-8125995b1f8b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
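The "Build topologies for 1 vcpu(s) 1:1:1 ... Got 1 possible topologies" lines above enumerate every sockets/cores/threads factorization of the flavor's vCPU count that fits the (here effectively unlimited) limits. A stand-alone sketch of that enumeration; it mirrors the idea rather than nova.virt.hardware's exact implementation:

```python
# Sketch of the enumeration behind "Got N possible topologies": every
# (sockets, cores, threads) factorization of the vCPU count within limits.
from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    found.append(VirtCPUTopology(sockets, cores, threads))
    return found

print(possible_topologies(1))  # [VirtCPUTopology(sockets=1, cores=1, threads=1)]
print(possible_topologies(4))  # 1x1x4, 1x2x2, 1x4x1, 2x1x2, 2x2x1, 4x1x1
```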
{{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1526.846702] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3628d288-9175-478d-a894-261dca54f19b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.857215] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5be61e84-5185-4c43-a4e3-395e64f57c34 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.884241] env[63379]: DEBUG oslo_vmware.api [None req-f163ccaa-7de0-4be8-a358-a6c9a5f91e08 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779322, 'name': RemoveSnapshot_Task} progress is 78%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.055611] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779325, 'name': CreateVM_Task, 'duration_secs': 0.464024} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1527.055906] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1527.056514] env[63379]: DEBUG oslo_concurrency.lockutils [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1527.056694] env[63379]: DEBUG oslo_concurrency.lockutils [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1527.057035] env[63379]: DEBUG oslo_concurrency.lockutils [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1527.057321] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0461a7a-f986-45bb-8bf8-5c5b51e292e4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.062018] env[63379]: DEBUG oslo_vmware.api [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Waiting for the task: (returnval){ [ 1527.062018] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5234f652-8d5d-6449-0e7d-9e0178b82a59" [ 1527.062018] env[63379]: _type = "Task" [ 1527.062018] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1527.069907] env[63379]: DEBUG oslo_vmware.api [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5234f652-8d5d-6449-0e7d-9e0178b82a59, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.132030] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779326, 'name': Rename_Task, 'duration_secs': 0.149142} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1527.132030] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1527.132193] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-08bdff3b-55a7-49ca-8b97-d7f96e0e3b28 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.140215] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for the task: (returnval){ [ 1527.140215] env[63379]: value = "task-1779327" [ 1527.140215] env[63379]: _type = "Task" [ 1527.140215] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1527.158144] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779327, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.350726] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 158fe346-93f5-422b-877a-8423547da58f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1527.383733] env[63379]: DEBUG oslo_vmware.api [None req-f163ccaa-7de0-4be8-a358-a6c9a5f91e08 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779322, 'name': RemoveSnapshot_Task, 'duration_secs': 0.580754} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1527.384022] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f163ccaa-7de0-4be8-a358-a6c9a5f91e08 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Deleted Snapshot of the VM instance {{(pid=63379) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1527.384274] env[63379]: INFO nova.compute.manager [None req-f163ccaa-7de0-4be8-a358-a6c9a5f91e08 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Took 18.57 seconds to snapshot the instance on the hypervisor. [ 1527.400341] env[63379]: DEBUG nova.network.neutron [req-673037eb-3cd3-4247-a9de-fca38a9a1fcb req-311a8312-92ab-4812-8b26-6a28f9e68777 service nova] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Updated VIF entry in instance network info cache for port 413ced3b-b54d-4b64-93a1-7a9b2b9857fc. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1527.400703] env[63379]: DEBUG nova.network.neutron [req-673037eb-3cd3-4247-a9de-fca38a9a1fcb req-311a8312-92ab-4812-8b26-6a28f9e68777 service nova] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Updating instance_info_cache with network_info: [{"id": "413ced3b-b54d-4b64-93a1-7a9b2b9857fc", "address": "fa:16:3e:4b:4c:7d", "network": {"id": "971209d3-a9b9-4857-964a-59fe7ffa394e", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1103991925-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2618e088eb0b4a16882dcd26273ed7c6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a69ed1dd-213a-4e30-992a-466735188bf6", "external-id": "nsx-vlan-transportzone-102", "segmentation_id": 102, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap413ced3b-b5", "ovs_interfaceid": "413ced3b-b54d-4b64-93a1-7a9b2b9857fc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1527.424971] env[63379]: DEBUG nova.network.neutron [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Successfully updated port: 3c22cde0-746e-43ec-b075-e14c004043c4 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1527.572819] env[63379]: DEBUG oslo_vmware.api [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5234f652-8d5d-6449-0e7d-9e0178b82a59, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.654349] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779327, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.853880] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance f10fe64d-a09e-488a-b609-3e38922cf2e0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1527.903679] env[63379]: DEBUG oslo_concurrency.lockutils [req-673037eb-3cd3-4247-a9de-fca38a9a1fcb req-311a8312-92ab-4812-8b26-6a28f9e68777 service nova] Releasing lock "refresh_cache-c439fe86-fc43-4c05-a4b7-3634a043269a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1527.927575] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Acquiring lock "refresh_cache-318355e9-b4cc-4645-ac51-b583d14e1134" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1527.927726] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Acquired lock "refresh_cache-318355e9-b4cc-4645-ac51-b583d14e1134" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1527.927880] env[63379]: DEBUG nova.network.neutron [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1528.074689] env[63379]: DEBUG oslo_vmware.api [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5234f652-8d5d-6449-0e7d-9e0178b82a59, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.151785] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779327, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.359079] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 1d2de9da-9dfe-42d2-b206-bb5139b1970b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1528.461285] env[63379]: DEBUG nova.network.neutron [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1528.578173] env[63379]: DEBUG oslo_vmware.api [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5234f652-8d5d-6449-0e7d-9e0178b82a59, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.628238] env[63379]: DEBUG nova.network.neutron [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Updating instance_info_cache with network_info: [{"id": "3c22cde0-746e-43ec-b075-e14c004043c4", "address": "fa:16:3e:2b:70:a9", "network": {"id": "ddbc3cba-6a78-4455-89dd-2b790241675e", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1612069245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e17ea72d033544159bbaea7365a7f221", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "76e60ff4-204c-4f48-bd0e-2d5fa0a812ef", "external-id": "nsx-vlan-transportzone-854", "segmentation_id": 854, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c22cde0-74", "ovs_interfaceid": "3c22cde0-746e-43ec-b075-e14c004043c4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1528.653135] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779327, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.722501] env[63379]: DEBUG nova.compute.manager [req-3bd37a95-aebe-4e99-ab0d-22a291344f9e req-9a4a98cd-e03b-4726-b9a9-8d0b268d9d46 service nova] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Received event network-vif-plugged-3c22cde0-746e-43ec-b075-e14c004043c4 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1528.722749] env[63379]: DEBUG oslo_concurrency.lockutils [req-3bd37a95-aebe-4e99-ab0d-22a291344f9e req-9a4a98cd-e03b-4726-b9a9-8d0b268d9d46 service nova] Acquiring lock "318355e9-b4cc-4645-ac51-b583d14e1134-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1528.722983] env[63379]: DEBUG oslo_concurrency.lockutils [req-3bd37a95-aebe-4e99-ab0d-22a291344f9e req-9a4a98cd-e03b-4726-b9a9-8d0b268d9d46 service nova] Lock "318355e9-b4cc-4645-ac51-b583d14e1134-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1528.723195] env[63379]: DEBUG oslo_concurrency.lockutils [req-3bd37a95-aebe-4e99-ab0d-22a291344f9e req-9a4a98cd-e03b-4726-b9a9-8d0b268d9d46 service nova] Lock "318355e9-b4cc-4645-ac51-b583d14e1134-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1528.723395] env[63379]: DEBUG nova.compute.manager [req-3bd37a95-aebe-4e99-ab0d-22a291344f9e req-9a4a98cd-e03b-4726-b9a9-8d0b268d9d46 service nova] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] No waiting events found dispatching network-vif-plugged-3c22cde0-746e-43ec-b075-e14c004043c4 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1528.723542] env[63379]: WARNING nova.compute.manager [req-3bd37a95-aebe-4e99-ab0d-22a291344f9e req-9a4a98cd-e03b-4726-b9a9-8d0b268d9d46 service nova] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Received unexpected event network-vif-plugged-3c22cde0-746e-43ec-b075-e14c004043c4 for instance with vm_state building and task_state spawning. [ 1528.723707] env[63379]: DEBUG nova.compute.manager [req-3bd37a95-aebe-4e99-ab0d-22a291344f9e req-9a4a98cd-e03b-4726-b9a9-8d0b268d9d46 service nova] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Received event network-changed-3c22cde0-746e-43ec-b075-e14c004043c4 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1528.723863] env[63379]: DEBUG nova.compute.manager [req-3bd37a95-aebe-4e99-ab0d-22a291344f9e req-9a4a98cd-e03b-4726-b9a9-8d0b268d9d46 service nova] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Refreshing instance network info cache due to event network-changed-3c22cde0-746e-43ec-b075-e14c004043c4. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1528.724046] env[63379]: DEBUG oslo_concurrency.lockutils [req-3bd37a95-aebe-4e99-ab0d-22a291344f9e req-9a4a98cd-e03b-4726-b9a9-8d0b268d9d46 service nova] Acquiring lock "refresh_cache-318355e9-b4cc-4645-ac51-b583d14e1134" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1528.862012] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1529.075745] env[63379]: DEBUG oslo_vmware.api [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5234f652-8d5d-6449-0e7d-9e0178b82a59, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.130547] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Releasing lock "refresh_cache-318355e9-b4cc-4645-ac51-b583d14e1134" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1529.130906] env[63379]: DEBUG nova.compute.manager [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Instance network_info: |[{"id": "3c22cde0-746e-43ec-b075-e14c004043c4", "address": "fa:16:3e:2b:70:a9", "network": {"id": "ddbc3cba-6a78-4455-89dd-2b790241675e", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1612069245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e17ea72d033544159bbaea7365a7f221", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "76e60ff4-204c-4f48-bd0e-2d5fa0a812ef", "external-id": "nsx-vlan-transportzone-854", "segmentation_id": 854, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c22cde0-74", "ovs_interfaceid": "3c22cde0-746e-43ec-b075-e14c004043c4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1529.131217] env[63379]: DEBUG oslo_concurrency.lockutils [req-3bd37a95-aebe-4e99-ab0d-22a291344f9e req-9a4a98cd-e03b-4726-b9a9-8d0b268d9d46 service nova] Acquired lock "refresh_cache-318355e9-b4cc-4645-ac51-b583d14e1134" 
{{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1529.131404] env[63379]: DEBUG nova.network.neutron [req-3bd37a95-aebe-4e99-ab0d-22a291344f9e req-9a4a98cd-e03b-4726-b9a9-8d0b268d9d46 service nova] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Refreshing network info cache for port 3c22cde0-746e-43ec-b075-e14c004043c4 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1529.132734] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2b:70:a9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '76e60ff4-204c-4f48-bd0e-2d5fa0a812ef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3c22cde0-746e-43ec-b075-e14c004043c4', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1529.140206] env[63379]: DEBUG oslo.service.loopingcall [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1529.141202] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1529.141444] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fe922af3-e83e-47cf-a701-2eb06212217d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.165055] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779327, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.166385] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1529.166385] env[63379]: value = "task-1779328" [ 1529.166385] env[63379]: _type = "Task" [ 1529.166385] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1529.174725] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779328, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.365290] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance f082cdd7-228e-4100-b301-5af6daea9b36 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1529.577839] env[63379]: DEBUG oslo_vmware.api [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5234f652-8d5d-6449-0e7d-9e0178b82a59, 'name': SearchDatastore_Task, 'duration_secs': 2.48947} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1529.578355] env[63379]: DEBUG oslo_concurrency.lockutils [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1529.578456] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1529.578694] env[63379]: DEBUG oslo_concurrency.lockutils [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1529.578846] env[63379]: DEBUG oslo_concurrency.lockutils [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1529.579042] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1529.579325] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0b04b4cb-c3ed-4c7b-8ebc-94d66d6996a0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.590791] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1529.591034] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1529.591767] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9848bd64-9336-4a25-8633-cb3209899b41 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.598517] env[63379]: DEBUG oslo_vmware.api [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Waiting for the task: (returnval){ [ 1529.598517] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5265c9c4-3747-a155-4529-0614296ab230" [ 1529.598517] env[63379]: _type = "Task" [ 1529.598517] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1529.608798] env[63379]: DEBUG oslo_vmware.api [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5265c9c4-3747-a155-4529-0614296ab230, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.667646] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779327, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.679087] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779328, 'name': CreateVM_Task, 'duration_secs': 0.347668} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1529.680276] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1529.680276] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1529.680396] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1529.680652] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1529.681041] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c6a03c1-5790-4986-a45b-41eec8c4c2cb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.686302] env[63379]: DEBUG oslo_vmware.api [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Waiting for the task: (returnval){ [ 1529.686302] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]526569c5-e068-4020-7294-a9330e2eb53b" [ 1529.686302] env[63379]: _type = "Task" [ 1529.686302] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1529.697431] env[63379]: DEBUG oslo_vmware.api [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]526569c5-e068-4020-7294-a9330e2eb53b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.739381] env[63379]: DEBUG nova.compute.manager [None req-cb38d75b-9388-4dee-be85-b7c91f68048e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1529.740362] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30c976d8-b7b0-4ca1-b6c3-57b0cbab7087 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.869038] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 1d76a28f-822d-4b4f-be2f-2ad3371b3979 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1529.886290] env[63379]: DEBUG nova.network.neutron [req-3bd37a95-aebe-4e99-ab0d-22a291344f9e req-9a4a98cd-e03b-4726-b9a9-8d0b268d9d46 service nova] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Updated VIF entry in instance network info cache for port 3c22cde0-746e-43ec-b075-e14c004043c4. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1529.887621] env[63379]: DEBUG nova.network.neutron [req-3bd37a95-aebe-4e99-ab0d-22a291344f9e req-9a4a98cd-e03b-4726-b9a9-8d0b268d9d46 service nova] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Updating instance_info_cache with network_info: [{"id": "3c22cde0-746e-43ec-b075-e14c004043c4", "address": "fa:16:3e:2b:70:a9", "network": {"id": "ddbc3cba-6a78-4455-89dd-2b790241675e", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1612069245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e17ea72d033544159bbaea7365a7f221", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "76e60ff4-204c-4f48-bd0e-2d5fa0a812ef", "external-id": "nsx-vlan-transportzone-854", "segmentation_id": 854, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c22cde0-74", "ovs_interfaceid": "3c22cde0-746e-43ec-b075-e14c004043c4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1530.112660] env[63379]: DEBUG oslo_vmware.api [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5265c9c4-3747-a155-4529-0614296ab230, 'name': SearchDatastore_Task, 'duration_secs': 0.011748} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.114028] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46676ae2-ccb0-414c-9903-7c425a097401 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.120779] env[63379]: DEBUG oslo_vmware.api [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Waiting for the task: (returnval){ [ 1530.120779] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]529c2297-4ee1-f2e4-2bea-1dfb2a8c2dd0" [ 1530.120779] env[63379]: _type = "Task" [ 1530.120779] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.132467] env[63379]: DEBUG oslo_vmware.api [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]529c2297-4ee1-f2e4-2bea-1dfb2a8c2dd0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.167092] env[63379]: DEBUG oslo_vmware.api [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779327, 'name': PowerOnVM_Task, 'duration_secs': 2.814119} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.167092] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1530.167092] env[63379]: INFO nova.compute.manager [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Took 11.02 seconds to spawn the instance on the hypervisor. [ 1530.167280] env[63379]: DEBUG nova.compute.manager [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1530.167964] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6c67dff-bd65-45fd-99db-e9b05f9ef011 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.196044] env[63379]: DEBUG oslo_vmware.api [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]526569c5-e068-4020-7294-a9330e2eb53b, 'name': SearchDatastore_Task, 'duration_secs': 0.01003} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.196323] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1530.196561] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1530.196839] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1530.253885] env[63379]: INFO nova.compute.manager [None req-cb38d75b-9388-4dee-be85-b7c91f68048e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] instance snapshotting [ 1530.256924] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b789747-186d-47aa-9cb1-44ad4d008add {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.278074] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-352976bf-8a88-45a0-870e-79da58caab8e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.371922] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 6e022c9a-642b-4d96-8195-e56809bbd7b9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1530.390669] env[63379]: DEBUG oslo_concurrency.lockutils [req-3bd37a95-aebe-4e99-ab0d-22a291344f9e req-9a4a98cd-e03b-4726-b9a9-8d0b268d9d46 service nova] Releasing lock "refresh_cache-318355e9-b4cc-4645-ac51-b583d14e1134" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1530.631833] env[63379]: DEBUG oslo_vmware.api [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]529c2297-4ee1-f2e4-2bea-1dfb2a8c2dd0, 'name': SearchDatastore_Task, 'duration_secs': 0.01049} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.632238] env[63379]: DEBUG oslo_concurrency.lockutils [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1530.632494] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] c439fe86-fc43-4c05-a4b7-3634a043269a/c439fe86-fc43-4c05-a4b7-3634a043269a.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1530.632830] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1530.633048] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1530.633280] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e1dfde1a-b57a-4b44-b06c-21ceb6add9de {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.635309] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-100fb6c2-f13f-4abf-a347-0e1fd58b31b0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.643278] env[63379]: DEBUG oslo_vmware.api [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Waiting for the task: (returnval){ [ 1530.643278] env[63379]: value = "task-1779329" [ 1530.643278] env[63379]: _type = "Task" [ 1530.643278] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.647580] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1530.647793] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1530.651169] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58bb82f0-258b-45a3-b50a-e60cf69f1f53 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.653591] env[63379]: DEBUG oslo_vmware.api [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Task: {'id': task-1779329, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.656523] env[63379]: DEBUG oslo_vmware.api [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Waiting for the task: (returnval){ [ 1530.656523] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]529aa9ee-d9ea-c6cd-3320-927ed1b7fe60" [ 1530.656523] env[63379]: _type = "Task" [ 1530.656523] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.664256] env[63379]: DEBUG oslo_vmware.api [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]529aa9ee-d9ea-c6cd-3320-927ed1b7fe60, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.688912] env[63379]: INFO nova.compute.manager [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Took 36.08 seconds to build instance. 
[ 1530.789175] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cb38d75b-9388-4dee-be85-b7c91f68048e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Creating Snapshot of the VM instance {{(pid=63379) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1530.789540] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-44d8504b-5d9c-4f65-80ac-a59e4c1d1b6d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.798560] env[63379]: DEBUG oslo_vmware.api [None req-cb38d75b-9388-4dee-be85-b7c91f68048e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Waiting for the task: (returnval){ [ 1530.798560] env[63379]: value = "task-1779330" [ 1530.798560] env[63379]: _type = "Task" [ 1530.798560] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.809591] env[63379]: DEBUG oslo_vmware.api [None req-cb38d75b-9388-4dee-be85-b7c91f68048e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779330, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.875960] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance f983d089-7cfc-46a5-8f8d-f49f67aef1da has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1530.997216] env[63379]: DEBUG oslo_concurrency.lockutils [None req-de9a04ff-b298-46a2-a268-8f5ae7ba0235 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquiring lock "2f98800d-800f-4ad7-bd65-f12879f02ce5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1530.997522] env[63379]: DEBUG oslo_concurrency.lockutils [None req-de9a04ff-b298-46a2-a268-8f5ae7ba0235 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Lock "2f98800d-800f-4ad7-bd65-f12879f02ce5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1530.997741] env[63379]: DEBUG oslo_concurrency.lockutils [None req-de9a04ff-b298-46a2-a268-8f5ae7ba0235 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquiring lock "2f98800d-800f-4ad7-bd65-f12879f02ce5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1530.997932] env[63379]: DEBUG oslo_concurrency.lockutils [None req-de9a04ff-b298-46a2-a268-8f5ae7ba0235 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Lock "2f98800d-800f-4ad7-bd65-f12879f02ce5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1530.998133] env[63379]: DEBUG oslo_concurrency.lockutils [None req-de9a04ff-b298-46a2-a268-8f5ae7ba0235 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Lock "2f98800d-800f-4ad7-bd65-f12879f02ce5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1531.000558] env[63379]: INFO nova.compute.manager [None req-de9a04ff-b298-46a2-a268-8f5ae7ba0235 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Terminating instance [ 1531.002197] env[63379]: DEBUG nova.compute.manager [None req-de9a04ff-b298-46a2-a268-8f5ae7ba0235 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1531.002412] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-de9a04ff-b298-46a2-a268-8f5ae7ba0235 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1531.003329] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b59647b2-7f54-4420-8283-e78a3edbafea {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.011156] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-de9a04ff-b298-46a2-a268-8f5ae7ba0235 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1531.011754] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fd9bc731-5370-4b72-89f8-81bc65d9b3f6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.018076] env[63379]: DEBUG oslo_vmware.api [None req-de9a04ff-b298-46a2-a268-8f5ae7ba0235 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for the task: (returnval){ [ 1531.018076] env[63379]: value = "task-1779331" [ 1531.018076] env[63379]: _type = "Task" [ 1531.018076] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.026310] env[63379]: DEBUG oslo_vmware.api [None req-de9a04ff-b298-46a2-a268-8f5ae7ba0235 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779331, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.061118] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b54e620a-f62a-41d3-a01c-3a7327a2adaa tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquiring lock "e838f54f-99f2-4f39-a9d2-725be8a5b3ce" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1531.155327] env[63379]: DEBUG oslo_vmware.api [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Task: {'id': task-1779329, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.168031] env[63379]: DEBUG oslo_vmware.api [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]529aa9ee-d9ea-c6cd-3320-927ed1b7fe60, 'name': SearchDatastore_Task, 'duration_secs': 0.028121} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.168206] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-86403291-65e6-4660-9444-944c91c8e66f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.174828] env[63379]: DEBUG oslo_vmware.api [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Waiting for the task: (returnval){ [ 1531.174828] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5267225c-6f90-9ae2-a040-4b7d1ea9bc65" [ 1531.174828] env[63379]: _type = "Task" [ 1531.174828] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.183930] env[63379]: DEBUG oslo_vmware.api [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5267225c-6f90-9ae2-a040-4b7d1ea9bc65, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.190636] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e33cfa4e-3035-4600-b38a-0b37561cb513 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Lock "e838f54f-99f2-4f39-a9d2-725be8a5b3ce" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.601s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1531.192135] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b54e620a-f62a-41d3-a01c-3a7327a2adaa tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Lock "e838f54f-99f2-4f39-a9d2-725be8a5b3ce" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.131s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1531.192299] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b54e620a-f62a-41d3-a01c-3a7327a2adaa tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquiring lock "e838f54f-99f2-4f39-a9d2-725be8a5b3ce-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1531.192523] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b54e620a-f62a-41d3-a01c-3a7327a2adaa tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Lock "e838f54f-99f2-4f39-a9d2-725be8a5b3ce-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1531.192704] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b54e620a-f62a-41d3-a01c-3a7327a2adaa tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Lock "e838f54f-99f2-4f39-a9d2-725be8a5b3ce-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1531.194779] env[63379]: INFO nova.compute.manager [None req-b54e620a-f62a-41d3-a01c-3a7327a2adaa tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Terminating instance [ 1531.201145] env[63379]: DEBUG nova.compute.manager [None req-b54e620a-f62a-41d3-a01c-3a7327a2adaa tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1531.201145] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b54e620a-f62a-41d3-a01c-3a7327a2adaa tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1531.201967] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ff634c0-f90e-4284-a104-857cf0415bd9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.212619] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-b54e620a-f62a-41d3-a01c-3a7327a2adaa tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1531.213040] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d8862561-7dec-4673-8c81-555cf6f7dc92 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.221520] env[63379]: DEBUG oslo_vmware.api [None req-b54e620a-f62a-41d3-a01c-3a7327a2adaa tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for the task: (returnval){ [ 1531.221520] env[63379]: value = "task-1779332" [ 1531.221520] env[63379]: _type = "Task" [ 1531.221520] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.235965] env[63379]: DEBUG oslo_vmware.api [None req-b54e620a-f62a-41d3-a01c-3a7327a2adaa tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779332, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.311438] env[63379]: DEBUG oslo_vmware.api [None req-cb38d75b-9388-4dee-be85-b7c91f68048e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779330, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.380790] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 38be0e8d-188b-4a98-aedc-5d941b63c000 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1531.529885] env[63379]: DEBUG oslo_vmware.api [None req-de9a04ff-b298-46a2-a268-8f5ae7ba0235 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779331, 'name': PowerOffVM_Task, 'duration_secs': 0.279804} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.530299] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-de9a04ff-b298-46a2-a268-8f5ae7ba0235 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1531.530549] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-de9a04ff-b298-46a2-a268-8f5ae7ba0235 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1531.530856] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1fb5d5f6-f836-402a-9dc8-be45f37dfb62 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.602737] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-de9a04ff-b298-46a2-a268-8f5ae7ba0235 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1531.603018] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-de9a04ff-b298-46a2-a268-8f5ae7ba0235 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1531.603228] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-de9a04ff-b298-46a2-a268-8f5ae7ba0235 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Deleting the datastore file [datastore1] 2f98800d-800f-4ad7-bd65-f12879f02ce5 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1531.603519] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-925fe094-93bf-4d10-b3e2-ffcab54519b1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.610108] env[63379]: DEBUG oslo_vmware.api [None req-de9a04ff-b298-46a2-a268-8f5ae7ba0235 tempest-MultipleCreateTestJSON-2001330541 
tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for the task: (returnval){ [ 1531.610108] env[63379]: value = "task-1779334" [ 1531.610108] env[63379]: _type = "Task" [ 1531.610108] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.618102] env[63379]: DEBUG oslo_vmware.api [None req-de9a04ff-b298-46a2-a268-8f5ae7ba0235 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779334, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.653233] env[63379]: DEBUG oslo_vmware.api [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Task: {'id': task-1779329, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.742116} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.653510] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] c439fe86-fc43-4c05-a4b7-3634a043269a/c439fe86-fc43-4c05-a4b7-3634a043269a.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1531.653741] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1531.654130] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-36c1d12a-3693-46cb-8138-a13574af8ead {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.660805] env[63379]: DEBUG oslo_vmware.api [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Waiting for the task: (returnval){ [ 1531.660805] env[63379]: value = "task-1779335" [ 1531.660805] env[63379]: _type = "Task" [ 1531.660805] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.669735] env[63379]: DEBUG oslo_vmware.api [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Task: {'id': task-1779335, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.686196] env[63379]: DEBUG oslo_vmware.api [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5267225c-6f90-9ae2-a040-4b7d1ea9bc65, 'name': SearchDatastore_Task, 'duration_secs': 0.063303} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.686542] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1531.686955] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 318355e9-b4cc-4645-ac51-b583d14e1134/318355e9-b4cc-4645-ac51-b583d14e1134.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1531.687236] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-77d0dfb5-6677-491b-adb2-c94b9c65e486 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.694541] env[63379]: DEBUG oslo_vmware.api [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Waiting for the task: (returnval){ [ 1531.694541] env[63379]: value = "task-1779336" [ 1531.694541] env[63379]: _type = "Task" [ 1531.694541] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.695015] env[63379]: DEBUG nova.compute.manager [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1531.706371] env[63379]: DEBUG oslo_vmware.api [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': task-1779336, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.733568] env[63379]: DEBUG oslo_vmware.api [None req-b54e620a-f62a-41d3-a01c-3a7327a2adaa tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779332, 'name': PowerOffVM_Task, 'duration_secs': 0.270555} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.734912] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-b54e620a-f62a-41d3-a01c-3a7327a2adaa tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1531.735212] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b54e620a-f62a-41d3-a01c-3a7327a2adaa tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1531.735520] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6761c648-714c-4c42-b8c8-3e7b271059f8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.811158] env[63379]: DEBUG oslo_vmware.api [None req-cb38d75b-9388-4dee-be85-b7c91f68048e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779330, 'name': CreateSnapshot_Task, 'duration_secs': 0.773651} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.811460] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cb38d75b-9388-4dee-be85-b7c91f68048e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Created Snapshot of the VM instance {{(pid=63379) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1531.812228] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-250be2eb-a4c0-45a6-8b43-02c4677051e8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.827241] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b54e620a-f62a-41d3-a01c-3a7327a2adaa tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1531.828568] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b54e620a-f62a-41d3-a01c-3a7327a2adaa tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1531.828568] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-b54e620a-f62a-41d3-a01c-3a7327a2adaa tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Deleting the datastore file [datastore1] e838f54f-99f2-4f39-a9d2-725be8a5b3ce {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1531.828568] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ce34eabd-3d5a-445e-8941-8de77d489f4e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.837372] 
env[63379]: DEBUG oslo_vmware.api [None req-b54e620a-f62a-41d3-a01c-3a7327a2adaa tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for the task: (returnval){ [ 1531.837372] env[63379]: value = "task-1779338" [ 1531.837372] env[63379]: _type = "Task" [ 1531.837372] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.844344] env[63379]: DEBUG oslo_vmware.api [None req-b54e620a-f62a-41d3-a01c-3a7327a2adaa tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779338, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.885265] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance ec1f7a44-7344-43fb-9d51-688731d8ce14 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1532.126764] env[63379]: DEBUG oslo_vmware.api [None req-de9a04ff-b298-46a2-a268-8f5ae7ba0235 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779334, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151954} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1532.127083] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-de9a04ff-b298-46a2-a268-8f5ae7ba0235 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1532.127308] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-de9a04ff-b298-46a2-a268-8f5ae7ba0235 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1532.127515] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-de9a04ff-b298-46a2-a268-8f5ae7ba0235 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1532.127722] env[63379]: INFO nova.compute.manager [None req-de9a04ff-b298-46a2-a268-8f5ae7ba0235 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1532.127981] env[63379]: DEBUG oslo.service.loopingcall [None req-de9a04ff-b298-46a2-a268-8f5ae7ba0235 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1532.128199] env[63379]: DEBUG nova.compute.manager [-] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1532.128297] env[63379]: DEBUG nova.network.neutron [-] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1532.171592] env[63379]: DEBUG oslo_vmware.api [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Task: {'id': task-1779335, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069365} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1532.171926] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1532.172687] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b169158-7999-4a41-8c3f-95c450b6cf3c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.195921] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Reconfiguring VM instance instance-0000002b to attach disk [datastore1] c439fe86-fc43-4c05-a4b7-3634a043269a/c439fe86-fc43-4c05-a4b7-3634a043269a.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1532.196337] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1d37aa9f-bbe6-45b2-92ca-56b866f38500 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.225150] env[63379]: DEBUG oslo_vmware.api [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': task-1779336, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.227041] env[63379]: DEBUG oslo_vmware.api [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Waiting for the task: (returnval){ [ 1532.227041] env[63379]: value = "task-1779339" [ 1532.227041] env[63379]: _type = "Task" [ 1532.227041] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.235654] env[63379]: DEBUG oslo_vmware.api [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Task: {'id': task-1779339, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.336394] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cb38d75b-9388-4dee-be85-b7c91f68048e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Creating linked-clone VM from snapshot {{(pid=63379) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1532.336795] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-b8d11eb9-71ee-4606-8170-ff5ba1ea9d55 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.345496] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1532.348835] env[63379]: DEBUG oslo_vmware.api [None req-b54e620a-f62a-41d3-a01c-3a7327a2adaa tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Task: {'id': task-1779338, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.380768} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1532.349336] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-b54e620a-f62a-41d3-a01c-3a7327a2adaa tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1532.349427] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b54e620a-f62a-41d3-a01c-3a7327a2adaa tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1532.349635] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b54e620a-f62a-41d3-a01c-3a7327a2adaa tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1532.349954] env[63379]: INFO nova.compute.manager [None req-b54e620a-f62a-41d3-a01c-3a7327a2adaa tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1532.350242] env[63379]: DEBUG oslo.service.loopingcall [None req-b54e620a-f62a-41d3-a01c-3a7327a2adaa tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1532.351551] env[63379]: DEBUG nova.compute.manager [-] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1532.351677] env[63379]: DEBUG nova.network.neutron [-] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1532.354102] env[63379]: DEBUG oslo_vmware.api [None req-cb38d75b-9388-4dee-be85-b7c91f68048e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Waiting for the task: (returnval){ [ 1532.354102] env[63379]: value = "task-1779340" [ 1532.354102] env[63379]: _type = "Task" [ 1532.354102] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.364454] env[63379]: DEBUG oslo_vmware.api [None req-cb38d75b-9388-4dee-be85-b7c91f68048e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779340, 'name': CloneVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.390290] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 2a996f06-542e-4f71-95a4-0f71097d1478 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1532.505313] env[63379]: DEBUG nova.compute.manager [req-13e0a154-2f94-4770-a7a7-08c23c2b326c req-d9e12339-c567-4abe-acc9-01b69665b646 service nova] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Received event network-vif-deleted-9f9986ae-7761-479b-b7eb-9d68c7c70e11 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1532.505313] env[63379]: INFO nova.compute.manager [req-13e0a154-2f94-4770-a7a7-08c23c2b326c req-d9e12339-c567-4abe-acc9-01b69665b646 service nova] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Neutron deleted interface 9f9986ae-7761-479b-b7eb-9d68c7c70e11; detaching it from the instance and deleting it from the info cache [ 1532.505313] env[63379]: DEBUG nova.network.neutron [req-13e0a154-2f94-4770-a7a7-08c23c2b326c req-d9e12339-c567-4abe-acc9-01b69665b646 service nova] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1532.707646] env[63379]: DEBUG oslo_vmware.api [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': task-1779336, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.739432] env[63379]: DEBUG oslo_vmware.api [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Task: {'id': task-1779339, 'name': ReconfigVM_Task, 'duration_secs': 0.321958} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1532.739761] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Reconfigured VM instance instance-0000002b to attach disk [datastore1] c439fe86-fc43-4c05-a4b7-3634a043269a/c439fe86-fc43-4c05-a4b7-3634a043269a.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1532.740683] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c9b53e62-449c-4852-8cf2-449be4cd7880 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.744580] env[63379]: DEBUG nova.compute.manager [req-89508d0b-8fc5-4c89-bf86-870bca9620a1 req-60204560-7ef4-469a-b5ae-527c0ce25b93 service nova] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Received event network-vif-deleted-6ffb0ce3-d1f7-4b60-b7be-bb0bd060175d {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1532.744785] env[63379]: INFO nova.compute.manager [req-89508d0b-8fc5-4c89-bf86-870bca9620a1 req-60204560-7ef4-469a-b5ae-527c0ce25b93 service nova] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Neutron deleted interface 6ffb0ce3-d1f7-4b60-b7be-bb0bd060175d; detaching it from the instance and deleting it from the info cache [ 1532.744966] env[63379]: DEBUG nova.network.neutron [req-89508d0b-8fc5-4c89-bf86-870bca9620a1 req-60204560-7ef4-469a-b5ae-527c0ce25b93 service nova] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1532.752021] env[63379]: DEBUG oslo_vmware.api [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Waiting for the task: (returnval){ [ 1532.752021] env[63379]: value = "task-1779341" [ 1532.752021] env[63379]: _type = "Task" [ 1532.752021] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.762278] env[63379]: DEBUG oslo_vmware.api [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Task: {'id': task-1779341, 'name': Rename_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.864485] env[63379]: DEBUG oslo_vmware.api [None req-cb38d75b-9388-4dee-be85-b7c91f68048e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779340, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.892987] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance ac596f08-86a3-42e0-86e6-41a173fe868f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1532.893353] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Total usable vcpus: 48, total allocated vcpus: 20 {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1532.893543] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4416MB phys_disk=200GB used_disk=18GB total_vcpus=48 used_vcpus=20 pci_stats=[] {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1532.957589] env[63379]: DEBUG nova.network.neutron [-] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1533.009023] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d6b8b95a-c596-4dee-ba44-edc5c78c197d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.020637] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4649663-7b8d-4b91-b345-12a11d2a3767 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.057607] env[63379]: DEBUG nova.compute.manager [req-13e0a154-2f94-4770-a7a7-08c23c2b326c req-d9e12339-c567-4abe-acc9-01b69665b646 service nova] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Detach interface failed, port_id=9f9986ae-7761-479b-b7eb-9d68c7c70e11, reason: Instance 2f98800d-800f-4ad7-bd65-f12879f02ce5 could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 1533.208650] env[63379]: DEBUG oslo_vmware.api [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': task-1779336, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.461418} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.209048] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 318355e9-b4cc-4645-ac51-b583d14e1134/318355e9-b4cc-4645-ac51-b583d14e1134.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1533.209203] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1533.209494] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2d18ad0e-dc9d-4ae7-bd1e-9ad34d050b22 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.217456] env[63379]: DEBUG oslo_vmware.api [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Waiting for the task: (returnval){ [ 1533.217456] env[63379]: value = "task-1779342" [ 1533.217456] env[63379]: _type = "Task" [ 1533.217456] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.222034] env[63379]: DEBUG nova.network.neutron [-] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1533.227439] env[63379]: DEBUG oslo_vmware.api [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': task-1779342, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.248258] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2319b67e-7290-41e0-81b2-28d6d10c8c73 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.261266] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eddc79b-da59-4956-b481-bd5a5ca3f051 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.278771] env[63379]: DEBUG oslo_vmware.api [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Task: {'id': task-1779341, 'name': Rename_Task, 'duration_secs': 0.370745} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.280240] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1533.280240] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4492673d-3603-46a4-9699-f3e2721fb05a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.287621] env[63379]: DEBUG oslo_vmware.api [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Waiting for the task: (returnval){ [ 1533.287621] env[63379]: value = "task-1779343" [ 1533.287621] env[63379]: _type = "Task" [ 1533.287621] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.308442] env[63379]: DEBUG nova.compute.manager [req-89508d0b-8fc5-4c89-bf86-870bca9620a1 req-60204560-7ef4-469a-b5ae-527c0ce25b93 service nova] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Detach interface failed, port_id=6ffb0ce3-d1f7-4b60-b7be-bb0bd060175d, reason: Instance e838f54f-99f2-4f39-a9d2-725be8a5b3ce could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 1533.315201] env[63379]: DEBUG oslo_vmware.api [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Task: {'id': task-1779343, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.372106] env[63379]: DEBUG oslo_vmware.api [None req-cb38d75b-9388-4dee-be85-b7c91f68048e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779340, 'name': CloneVM_Task} progress is 94%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.418412] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-353afa6b-d249-4a69-bc67-0ec44e9b5714 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.426332] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dda2e4e-b262-4811-a73e-0bdd7105f9d5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.460098] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7df9889-2a36-4e2c-9f3b-405c71a19637 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.462901] env[63379]: INFO nova.compute.manager [-] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Took 1.33 seconds to deallocate network for instance. 
[ 1533.474022] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af684dac-6956-4f59-ae4e-7632cf1346bb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.491406] env[63379]: DEBUG nova.compute.provider_tree [None req-76609179-3ebc-4316-8203-21a64671102b None None] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1533.724038] env[63379]: INFO nova.compute.manager [-] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] Took 1.37 seconds to deallocate network for instance. [ 1533.733040] env[63379]: DEBUG oslo_vmware.api [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': task-1779342, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067909} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.734032] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1533.734332] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10a82bea-7b0a-4cbf-9b6f-9f652a1c40a6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.759267] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Reconfiguring VM instance instance-0000002c to attach disk [datastore1] 318355e9-b4cc-4645-ac51-b583d14e1134/318355e9-b4cc-4645-ac51-b583d14e1134.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1533.760075] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2ebcbf86-dcd1-4be2-99f5-a97dd5c999da {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.782482] env[63379]: DEBUG oslo_vmware.api [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Waiting for the task: (returnval){ [ 1533.782482] env[63379]: value = "task-1779344" [ 1533.782482] env[63379]: _type = "Task" [ 1533.782482] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.796417] env[63379]: DEBUG oslo_vmware.api [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': task-1779344, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.803072] env[63379]: DEBUG oslo_vmware.api [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Task: {'id': task-1779343, 'name': PowerOnVM_Task, 'duration_secs': 0.475935} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.803381] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1533.803685] env[63379]: INFO nova.compute.manager [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Took 9.71 seconds to spawn the instance on the hypervisor. [ 1533.803984] env[63379]: DEBUG nova.compute.manager [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1533.804816] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-479e99c5-bc2f-45be-93f5-4fe2600ae40a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.869088] env[63379]: DEBUG oslo_vmware.api [None req-cb38d75b-9388-4dee-be85-b7c91f68048e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779340, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.970037] env[63379]: DEBUG oslo_concurrency.lockutils [None req-de9a04ff-b298-46a2-a268-8f5ae7ba0235 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1534.014288] env[63379]: ERROR nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] [req-f39c0027-f388-4d09-b8e0-08e16b8b26c6] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID cf478c89-515f-4372-b90f-4868ab56e978. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f39c0027-f388-4d09-b8e0-08e16b8b26c6"}]} [ 1534.030878] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Refreshing inventories for resource provider cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1534.046730] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Updating ProviderTree inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1534.046971] env[63379]: DEBUG nova.compute.provider_tree [None req-76609179-3ebc-4316-8203-21a64671102b None None] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1534.058926] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Refreshing aggregate associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, aggregates: None {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1534.079053] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Refreshing trait associations for resource provider 
cf478c89-515f-4372-b90f-4868ab56e978, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1534.230839] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b54e620a-f62a-41d3-a01c-3a7327a2adaa tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1534.293204] env[63379]: DEBUG oslo_vmware.api [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': task-1779344, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.328169] env[63379]: INFO nova.compute.manager [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Took 35.62 seconds to build instance. [ 1534.369482] env[63379]: DEBUG oslo_vmware.api [None req-cb38d75b-9388-4dee-be85-b7c91f68048e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779340, 'name': CloneVM_Task, 'duration_secs': 1.954787} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.369772] env[63379]: INFO nova.virt.vmwareapi.vmops [None req-cb38d75b-9388-4dee-be85-b7c91f68048e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Created linked-clone VM from snapshot [ 1534.370567] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61f16ca4-3739-44a1-8385-95328a66b730 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.380950] env[63379]: DEBUG nova.virt.vmwareapi.images [None req-cb38d75b-9388-4dee-be85-b7c91f68048e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Uploading image 3fd3ca89-fbce-4de3-ba04-2722a2b1c484 {{(pid=63379) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1534.403231] env[63379]: DEBUG oslo_vmware.rw_handles [None req-cb38d75b-9388-4dee-be85-b7c91f68048e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1534.403231] env[63379]: value = "vm-369344" [ 1534.403231] env[63379]: _type = "VirtualMachine" [ 1534.403231] env[63379]: }. 
{{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1534.404042] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-4b849243-923b-4cd9-87f9-9cee83a826f3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.411509] env[63379]: DEBUG oslo_vmware.rw_handles [None req-cb38d75b-9388-4dee-be85-b7c91f68048e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Lease: (returnval){ [ 1534.411509] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]522cdefe-51ef-7198-6938-1970d5efef75" [ 1534.411509] env[63379]: _type = "HttpNfcLease" [ 1534.411509] env[63379]: } obtained for exporting VM: (result){ [ 1534.411509] env[63379]: value = "vm-369344" [ 1534.411509] env[63379]: _type = "VirtualMachine" [ 1534.411509] env[63379]: }. {{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1534.411755] env[63379]: DEBUG oslo_vmware.api [None req-cb38d75b-9388-4dee-be85-b7c91f68048e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Waiting for the lease: (returnval){ [ 1534.411755] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]522cdefe-51ef-7198-6938-1970d5efef75" [ 1534.411755] env[63379]: _type = "HttpNfcLease" [ 1534.411755] env[63379]: } to be ready. {{(pid=63379) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1534.418813] env[63379]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1534.418813] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]522cdefe-51ef-7198-6938-1970d5efef75" [ 1534.418813] env[63379]: _type = "HttpNfcLease" [ 1534.418813] env[63379]: } is initializing. 
{{(pid=63379) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1534.518877] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55904eb3-f3e8-4c1f-832e-ba80c064d34b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.526987] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-668407c1-927a-4020-baec-d44d065a20a6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.559435] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2314acb7-0bc5-45e5-ac2c-dca3fb40599a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.567575] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-543659f1-474f-4081-a343-20a383a69f3d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.581838] env[63379]: DEBUG nova.compute.provider_tree [None req-76609179-3ebc-4316-8203-21a64671102b None None] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1534.794720] env[63379]: DEBUG oslo_vmware.api [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': task-1779344, 'name': ReconfigVM_Task, 'duration_secs': 0.576183} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.795712] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Reconfigured VM instance instance-0000002c to attach disk [datastore1] 318355e9-b4cc-4645-ac51-b583d14e1134/318355e9-b4cc-4645-ac51-b583d14e1134.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1534.795829] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c7c689e6-61f9-42eb-bd13-f3e189784d48 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.802728] env[63379]: DEBUG oslo_vmware.api [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Waiting for the task: (returnval){ [ 1534.802728] env[63379]: value = "task-1779346" [ 1534.802728] env[63379]: _type = "Task" [ 1534.802728] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.811067] env[63379]: DEBUG oslo_vmware.api [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': task-1779346, 'name': Rename_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.830394] env[63379]: DEBUG oslo_concurrency.lockutils [None req-91e097bf-8240-4f91-bd0f-998abfa5d4d2 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Lock "c439fe86-fc43-4c05-a4b7-3634a043269a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 66.610s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1534.920614] env[63379]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1534.920614] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]522cdefe-51ef-7198-6938-1970d5efef75" [ 1534.920614] env[63379]: _type = "HttpNfcLease" [ 1534.920614] env[63379]: } is ready. {{(pid=63379) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1534.920933] env[63379]: DEBUG oslo_vmware.rw_handles [None req-cb38d75b-9388-4dee-be85-b7c91f68048e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1534.920933] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]522cdefe-51ef-7198-6938-1970d5efef75" [ 1534.920933] env[63379]: _type = "HttpNfcLease" [ 1534.920933] env[63379]: }. {{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1534.921716] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd6ef1e7-5cc1-4357-9a7f-83bc2a31b053 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.929612] env[63379]: DEBUG oslo_vmware.rw_handles [None req-cb38d75b-9388-4dee-be85-b7c91f68048e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524edc75-8d0b-90c3-e543-42446e11ac3b/disk-0.vmdk from lease info. {{(pid=63379) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1534.929795] env[63379]: DEBUG oslo_vmware.rw_handles [None req-cb38d75b-9388-4dee-be85-b7c91f68048e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524edc75-8d0b-90c3-e543-42446e11ac3b/disk-0.vmdk for reading. 
{{(pid=63379) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1535.018538] env[63379]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-effe56ed-7388-4e5f-8617-7393915a1da5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.113172] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Updated inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 with generation 69 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1535.113402] env[63379]: DEBUG nova.compute.provider_tree [None req-76609179-3ebc-4316-8203-21a64671102b None None] Updating resource provider cf478c89-515f-4372-b90f-4868ab56e978 generation from 69 to 70 during operation: update_inventory {{(pid=63379) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1535.113552] env[63379]: DEBUG nova.compute.provider_tree [None req-76609179-3ebc-4316-8203-21a64671102b None None] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1535.312730] env[63379]: DEBUG oslo_vmware.api [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': task-1779346, 'name': Rename_Task, 'duration_secs': 0.236591} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1535.313115] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1535.313282] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-192c6df1-dc60-4cfe-ba3a-b65edcc5cb0d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.320753] env[63379]: DEBUG oslo_vmware.api [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Waiting for the task: (returnval){ [ 1535.320753] env[63379]: value = "task-1779347" [ 1535.320753] env[63379]: _type = "Task" [ 1535.320753] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.329437] env[63379]: DEBUG oslo_vmware.api [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': task-1779347, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.333061] env[63379]: DEBUG nova.compute.manager [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1535.385374] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a939bed8-c477-4e7e-8e50-0ba34749f1da tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Acquiring lock "interface-c439fe86-fc43-4c05-a4b7-3634a043269a-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1535.385912] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a939bed8-c477-4e7e-8e50-0ba34749f1da tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Lock "interface-c439fe86-fc43-4c05-a4b7-3634a043269a-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1535.386547] env[63379]: DEBUG nova.objects.instance [None req-a939bed8-c477-4e7e-8e50-0ba34749f1da tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Lazy-loading 'flavor' on Instance uuid c439fe86-fc43-4c05-a4b7-3634a043269a {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1535.619326] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63379) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1535.619326] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 10.840s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1535.620527] env[63379]: DEBUG oslo_concurrency.lockutils [None req-df8ea99e-33f0-4daa-b1e2-1115c27c15ba tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.844s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1535.620527] env[63379]: DEBUG oslo_concurrency.lockutils [None req-df8ea99e-33f0-4daa-b1e2-1115c27c15ba tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s 
{{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1535.622828] env[63379]: DEBUG oslo_concurrency.lockutils [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.220s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1535.624373] env[63379]: INFO nova.compute.claims [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1535.627431] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1535.627675] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Cleaning up deleted instances {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11274}} [ 1535.654817] env[63379]: INFO nova.scheduler.client.report [None req-df8ea99e-33f0-4daa-b1e2-1115c27c15ba tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Deleted allocations for instance 07cc8cd7-8368-41dd-ae13-01c8275cac9e [ 1535.833252] env[63379]: DEBUG oslo_vmware.api [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': task-1779347, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.859744] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1535.891177] env[63379]: DEBUG nova.objects.instance [None req-a939bed8-c477-4e7e-8e50-0ba34749f1da tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Lazy-loading 'pci_requests' on Instance uuid c439fe86-fc43-4c05-a4b7-3634a043269a {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1536.138164] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] There are 21 instances to clean {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11283}} [ 1536.138561] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: bc7baa1a-f65d-41d4-ad86-de041fbb2306] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1536.170805] env[63379]: DEBUG oslo_concurrency.lockutils [None req-df8ea99e-33f0-4daa-b1e2-1115c27c15ba tempest-ServersTestMultiNic-1484800288 tempest-ServersTestMultiNic-1484800288-project-member] Lock "07cc8cd7-8368-41dd-ae13-01c8275cac9e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.546s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1536.341690] env[63379]: DEBUG oslo_vmware.api [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': task-1779347, 'name': PowerOnVM_Task, 'duration_secs': 0.764939} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1536.341690] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1536.342143] env[63379]: INFO nova.compute.manager [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Took 9.53 seconds to spawn the instance on the hypervisor. 
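The entries above follow the standard oslo.vmware task pattern: a vCenter method that returns a Task object (ReconfigVM_Task, Rename_Task, PowerOnVM_Task) is invoked, and wait_for_task/_poll_task then polls its progress until it completes, which is where the "progress is N%" lines come from. The following is a minimal sketch of that pattern only, not Nova's implementation; the VMwareAPISession constructor arguments, host, credentials, and the power_on_vm helper are assumptions and may differ between oslo.vmware releases.

# Illustrative sketch of the invoke-then-poll pattern seen in the
# wait_for_task/_poll_task entries above. Not Nova's code.
from oslo_vmware import api


def power_on_vm(host, user, password, vm_ref):
    """Power on a VM and block until vCenter reports the task finished."""
    # Constructor arguments are assumptions; check the oslo.vmware release in use.
    session = api.VMwareAPISession(
        host, user, password,
        api_retry_count=10,        # retry transient API faults
        task_poll_interval=0.5)    # seconds between progress polls
    # invoke_api() returns the Task managed-object reference; wait_for_task()
    # polls it (producing "progress is N%"-style updates) and raises if the
    # task ends in an error state.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    return session.wait_for_task(task)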
[ 1536.342143] env[63379]: DEBUG nova.compute.manager [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1536.343055] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7af5870-9cc5-4134-a1a5-c0f62ffa7acc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.393500] env[63379]: DEBUG nova.objects.base [None req-a939bed8-c477-4e7e-8e50-0ba34749f1da tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=63379) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1536.393731] env[63379]: DEBUG nova.network.neutron [None req-a939bed8-c477-4e7e-8e50-0ba34749f1da tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1536.488059] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a939bed8-c477-4e7e-8e50-0ba34749f1da tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Lock "interface-c439fe86-fc43-4c05-a4b7-3634a043269a-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.102s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1536.645336] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 48c0d20e-adc4-40a9-888c-ffea363f6edb] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1536.668337] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-309b1af0-45e3-4ac0-9d29-886aa7431d44 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.677337] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05a75d2e-84fc-48d8-a165-71ee4e1e8178 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.712196] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-684ca3dd-8685-421e-913b-d2c49a4dd5f5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.721918] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c52e86d2-c8f6-4835-acdf-84725e266424 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.737271] env[63379]: DEBUG nova.compute.provider_tree [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1536.873725] env[63379]: INFO nova.compute.manager [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Took 36.70 seconds to build instance. [ 1537.148646] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 76731b1b-af66-441b-8fe4-d5d7e7faf3ca] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1537.240915] env[63379]: DEBUG nova.scheduler.client.report [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1537.376909] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6a73f2dd-f9a1-4700-afea-7c2c22968137 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Lock "318355e9-b4cc-4645-ac51-b583d14e1134" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.514s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1537.652174] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 8a7a3a54-ca4f-4860-a976-7d6b1212b9c9] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1537.746368] env[63379]: DEBUG oslo_concurrency.lockutils [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.123s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1537.746911] env[63379]: DEBUG nova.compute.manager [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1537.750044] env[63379]: DEBUG oslo_concurrency.lockutils [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.756s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1537.751511] env[63379]: INFO nova.compute.claims [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1537.879870] env[63379]: DEBUG nova.compute.manager [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1538.155880] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: c999d64e-3f5b-4854-8b92-6d0d17f49dd7] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1538.263021] env[63379]: DEBUG nova.compute.utils [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1538.263021] env[63379]: DEBUG nova.compute.manager [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1538.263021] env[63379]: DEBUG nova.network.neutron [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1538.314870] env[63379]: DEBUG nova.policy [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bb1c350756184d8fb157ea6f40e856ff', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dbe6c82fa6ad4d7aa2e920bb4d17d5e0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1538.406131] env[63379]: DEBUG nova.compute.manager [req-1c75224d-52ed-4225-8a00-4794f9684fe3 req-8ceed1ba-a832-4921-82dd-cf1fed184c4c service nova] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Received event network-changed-01134024-43f6-41eb-b222-1e69cef1bfd4 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1538.406131] env[63379]: DEBUG nova.compute.manager [req-1c75224d-52ed-4225-8a00-4794f9684fe3 req-8ceed1ba-a832-4921-82dd-cf1fed184c4c service nova] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Refreshing instance network info cache due to event network-changed-01134024-43f6-41eb-b222-1e69cef1bfd4. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1538.406131] env[63379]: DEBUG oslo_concurrency.lockutils [req-1c75224d-52ed-4225-8a00-4794f9684fe3 req-8ceed1ba-a832-4921-82dd-cf1fed184c4c service nova] Acquiring lock "refresh_cache-915aec20-5765-4aad-8b0f-f2d71b7d6428" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1538.407554] env[63379]: DEBUG oslo_concurrency.lockutils [req-1c75224d-52ed-4225-8a00-4794f9684fe3 req-8ceed1ba-a832-4921-82dd-cf1fed184c4c service nova] Acquired lock "refresh_cache-915aec20-5765-4aad-8b0f-f2d71b7d6428" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1538.407956] env[63379]: DEBUG nova.network.neutron [req-1c75224d-52ed-4225-8a00-4794f9684fe3 req-8ceed1ba-a832-4921-82dd-cf1fed184c4c service nova] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Refreshing network info cache for port 01134024-43f6-41eb-b222-1e69cef1bfd4 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1538.411160] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1538.661786] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: de671ba9-0d86-4f89-a6bd-ecea9ad0ba85] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1538.749795] env[63379]: DEBUG nova.network.neutron [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Successfully created port: 6af296da-afd1-49bb-b790-5fb012d68a2c {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1538.768547] env[63379]: DEBUG nova.compute.manager [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Start building block device mappings for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1538.972183] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2778ed40-f1eb-40d9-8eaa-7b5c1add45a1 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Acquiring lock "c439fe86-fc43-4c05-a4b7-3634a043269a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1538.972183] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2778ed40-f1eb-40d9-8eaa-7b5c1add45a1 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Lock "c439fe86-fc43-4c05-a4b7-3634a043269a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1538.972183] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2778ed40-f1eb-40d9-8eaa-7b5c1add45a1 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Acquiring lock "c439fe86-fc43-4c05-a4b7-3634a043269a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1538.972183] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2778ed40-f1eb-40d9-8eaa-7b5c1add45a1 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Lock "c439fe86-fc43-4c05-a4b7-3634a043269a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1538.972183] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2778ed40-f1eb-40d9-8eaa-7b5c1add45a1 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Lock "c439fe86-fc43-4c05-a4b7-3634a043269a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1538.974425] env[63379]: INFO nova.compute.manager [None req-2778ed40-f1eb-40d9-8eaa-7b5c1add45a1 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Terminating instance [ 1538.976657] env[63379]: DEBUG nova.compute.manager [None req-2778ed40-f1eb-40d9-8eaa-7b5c1add45a1 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1538.976999] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-2778ed40-f1eb-40d9-8eaa-7b5c1add45a1 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1538.978357] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa065707-0f83-442e-a15c-a7461a04927c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.991019] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-2778ed40-f1eb-40d9-8eaa-7b5c1add45a1 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1538.991019] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e883d012-6259-4d0f-a103-6f5fa7b5677f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.002023] env[63379]: DEBUG oslo_vmware.api [None req-2778ed40-f1eb-40d9-8eaa-7b5c1add45a1 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Waiting for the task: (returnval){ [ 1539.002023] env[63379]: value = "task-1779348" [ 1539.002023] env[63379]: _type = "Task" [ 1539.002023] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1539.013268] env[63379]: DEBUG oslo_vmware.api [None req-2778ed40-f1eb-40d9-8eaa-7b5c1add45a1 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Task: {'id': task-1779348, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.164491] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 6b4e80fc-582f-432b-aa99-ec133127578e] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1539.310741] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04581354-47e8-426b-aaac-e4dd5838d5ed {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.321776] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f58f131f-5bc6-4d7b-9ef0-fc164d5d2719 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.361394] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78e6657a-687c-44d9-849d-9e73e58ced06 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.371224] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5f39b3d-50ad-46e3-bcec-7a992cbfc7db {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.387531] env[63379]: DEBUG nova.compute.provider_tree [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1539.441689] env[63379]: DEBUG nova.network.neutron [req-1c75224d-52ed-4225-8a00-4794f9684fe3 req-8ceed1ba-a832-4921-82dd-cf1fed184c4c service nova] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Updated VIF entry in instance network info cache for port 01134024-43f6-41eb-b222-1e69cef1bfd4. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1539.442177] env[63379]: DEBUG nova.network.neutron [req-1c75224d-52ed-4225-8a00-4794f9684fe3 req-8ceed1ba-a832-4921-82dd-cf1fed184c4c service nova] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Updating instance_info_cache with network_info: [{"id": "01134024-43f6-41eb-b222-1e69cef1bfd4", "address": "fa:16:3e:25:e3:d2", "network": {"id": "ddbc3cba-6a78-4455-89dd-2b790241675e", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1612069245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.192", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e17ea72d033544159bbaea7365a7f221", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "76e60ff4-204c-4f48-bd0e-2d5fa0a812ef", "external-id": "nsx-vlan-transportzone-854", "segmentation_id": 854, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01134024-43", "ovs_interfaceid": "01134024-43f6-41eb-b222-1e69cef1bfd4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1539.511951] env[63379]: DEBUG oslo_vmware.api [None req-2778ed40-f1eb-40d9-8eaa-7b5c1add45a1 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Task: {'id': task-1779348, 'name': PowerOffVM_Task, 'duration_secs': 0.294655} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1539.512368] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-2778ed40-f1eb-40d9-8eaa-7b5c1add45a1 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1539.512600] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-2778ed40-f1eb-40d9-8eaa-7b5c1add45a1 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1539.512901] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e03613b8-e1ba-4f56-b542-6400c48da36d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.605662] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-2778ed40-f1eb-40d9-8eaa-7b5c1add45a1 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1539.605899] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-2778ed40-f1eb-40d9-8eaa-7b5c1add45a1 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1539.606104] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-2778ed40-f1eb-40d9-8eaa-7b5c1add45a1 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Deleting the datastore file [datastore1] c439fe86-fc43-4c05-a4b7-3634a043269a {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1539.606394] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b243eeaf-c757-45b7-af96-e069fef26fd5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.615251] env[63379]: DEBUG oslo_vmware.api [None req-2778ed40-f1eb-40d9-8eaa-7b5c1add45a1 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Waiting for the task: (returnval){ [ 1539.615251] env[63379]: value = "task-1779350" [ 1539.615251] env[63379]: _type = "Task" [ 1539.615251] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1539.623988] env[63379]: DEBUG oslo_vmware.api [None req-2778ed40-f1eb-40d9-8eaa-7b5c1add45a1 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Task: {'id': task-1779350, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.671772] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: d221329b-eee4-42f5-bb27-cf6af0386c04] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1539.779009] env[63379]: DEBUG nova.compute.manager [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1539.811226] env[63379]: DEBUG nova.virt.hardware [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1539.811541] env[63379]: DEBUG nova.virt.hardware [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1539.811720] env[63379]: DEBUG nova.virt.hardware [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1539.811895] env[63379]: DEBUG nova.virt.hardware [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1539.812059] env[63379]: DEBUG nova.virt.hardware [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1539.812219] env[63379]: DEBUG nova.virt.hardware [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1539.812434] env[63379]: DEBUG nova.virt.hardware [None req-42d05773-fe02-45e3-bba3-da73f9a60016 
tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1539.812600] env[63379]: DEBUG nova.virt.hardware [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1539.812770] env[63379]: DEBUG nova.virt.hardware [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1539.812938] env[63379]: DEBUG nova.virt.hardware [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1539.813135] env[63379]: DEBUG nova.virt.hardware [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1539.814042] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c543cfcc-d762-4e23-9930-43acb16a0ecb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.823436] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52dd776d-a9c4-4342-9eea-6fdad45f752f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.891096] env[63379]: DEBUG nova.scheduler.client.report [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1539.945610] env[63379]: DEBUG oslo_concurrency.lockutils [req-1c75224d-52ed-4225-8a00-4794f9684fe3 req-8ceed1ba-a832-4921-82dd-cf1fed184c4c service nova] Releasing lock "refresh_cache-915aec20-5765-4aad-8b0f-f2d71b7d6428" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1540.126905] env[63379]: DEBUG oslo_vmware.api [None req-2778ed40-f1eb-40d9-8eaa-7b5c1add45a1 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Task: {'id': task-1779350, 'name': 
DeleteDatastoreFile_Task, 'duration_secs': 0.188056} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1540.127196] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-2778ed40-f1eb-40d9-8eaa-7b5c1add45a1 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1540.127390] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-2778ed40-f1eb-40d9-8eaa-7b5c1add45a1 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1540.127576] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-2778ed40-f1eb-40d9-8eaa-7b5c1add45a1 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1540.127758] env[63379]: INFO nova.compute.manager [None req-2778ed40-f1eb-40d9-8eaa-7b5c1add45a1 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1540.128009] env[63379]: DEBUG oslo.service.loopingcall [None req-2778ed40-f1eb-40d9-8eaa-7b5c1add45a1 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1540.128224] env[63379]: DEBUG nova.compute.manager [-] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1540.128321] env[63379]: DEBUG nova.network.neutron [-] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1540.175754] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 571bb238-9cf3-475e-b596-a9609acc8696] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1540.386196] env[63379]: DEBUG nova.compute.manager [req-ea417a70-a77f-45c5-b9a0-4ef8eba84cb1 req-752126db-7be9-4df3-9758-1b4506c24180 service nova] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Received event network-changed-01134024-43f6-41eb-b222-1e69cef1bfd4 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1540.386426] env[63379]: DEBUG nova.compute.manager [req-ea417a70-a77f-45c5-b9a0-4ef8eba84cb1 req-752126db-7be9-4df3-9758-1b4506c24180 service nova] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Refreshing instance network info cache due to event network-changed-01134024-43f6-41eb-b222-1e69cef1bfd4. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1540.386658] env[63379]: DEBUG oslo_concurrency.lockutils [req-ea417a70-a77f-45c5-b9a0-4ef8eba84cb1 req-752126db-7be9-4df3-9758-1b4506c24180 service nova] Acquiring lock "refresh_cache-915aec20-5765-4aad-8b0f-f2d71b7d6428" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1540.386876] env[63379]: DEBUG oslo_concurrency.lockutils [req-ea417a70-a77f-45c5-b9a0-4ef8eba84cb1 req-752126db-7be9-4df3-9758-1b4506c24180 service nova] Acquired lock "refresh_cache-915aec20-5765-4aad-8b0f-f2d71b7d6428" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1540.386976] env[63379]: DEBUG nova.network.neutron [req-ea417a70-a77f-45c5-b9a0-4ef8eba84cb1 req-752126db-7be9-4df3-9758-1b4506c24180 service nova] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Refreshing network info cache for port 01134024-43f6-41eb-b222-1e69cef1bfd4 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1540.397839] env[63379]: DEBUG oslo_concurrency.lockutils [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.648s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1540.398396] env[63379]: DEBUG nova.compute.manager [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1540.401255] env[63379]: DEBUG oslo_concurrency.lockutils [None req-093f700d-9e58-47b8-aaf3-6db145a6412b tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.567s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1540.401502] env[63379]: DEBUG oslo_concurrency.lockutils [None req-093f700d-9e58-47b8-aaf3-6db145a6412b tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1540.404201] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.293s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1540.405829] env[63379]: INFO nova.compute.claims [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] [instance: 158fe346-93f5-422b-877a-8423547da58f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1540.433595] env[63379]: INFO nova.scheduler.client.report [None req-093f700d-9e58-47b8-aaf3-6db145a6412b tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Deleted allocations for instance a78feafb-00bc-44c4-acd3-a36fb8a81767 [ 1540.654017] env[63379]: DEBUG nova.compute.manager [req-edfc91a0-bd04-479c-a2a9-4d0e759a3650 req-3ed8e4ad-51fd-42f7-9822-6dd0431a9d0f service nova] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Received event network-vif-deleted-413ced3b-b54d-4b64-93a1-7a9b2b9857fc {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1540.654440] env[63379]: INFO nova.compute.manager [req-edfc91a0-bd04-479c-a2a9-4d0e759a3650 req-3ed8e4ad-51fd-42f7-9822-6dd0431a9d0f service nova] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Neutron deleted interface 413ced3b-b54d-4b64-93a1-7a9b2b9857fc; detaching it from the instance and deleting it from the info cache [ 1540.654588] env[63379]: DEBUG nova.network.neutron [req-edfc91a0-bd04-479c-a2a9-4d0e759a3650 req-3ed8e4ad-51fd-42f7-9822-6dd0431a9d0f service nova] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1540.679281] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: aaaf4b06-ef84-41ba-8054-29582854a9f1] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1540.911673] env[63379]: DEBUG nova.compute.utils [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Using /dev/sd instead of None 
{{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1540.913285] env[63379]: DEBUG nova.compute.manager [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1540.916954] env[63379]: DEBUG nova.network.neutron [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1540.947428] env[63379]: DEBUG oslo_concurrency.lockutils [None req-093f700d-9e58-47b8-aaf3-6db145a6412b tempest-InstanceActionsTestJSON-1725331191 tempest-InstanceActionsTestJSON-1725331191-project-member] Lock "a78feafb-00bc-44c4-acd3-a36fb8a81767" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.128s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1540.956060] env[63379]: DEBUG nova.network.neutron [-] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1541.014946] env[63379]: DEBUG nova.network.neutron [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Successfully updated port: 6af296da-afd1-49bb-b790-5fb012d68a2c {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1541.031158] env[63379]: DEBUG nova.policy [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bdb7e6e2801d4644a9684ccef5c4e172', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '17efc8d00fd341d794e0458aaac75ea1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1541.159834] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9a0531ea-7436-43a2-94ef-ecf66aa8ed3a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.175841] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ec29736-b758-4089-be86-f1521949fee8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.193076] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 25090d85-cd10-44fc-aa9d-071ada14f249] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1541.221435] env[63379]: DEBUG nova.compute.manager 
[req-edfc91a0-bd04-479c-a2a9-4d0e759a3650 req-3ed8e4ad-51fd-42f7-9822-6dd0431a9d0f service nova] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Detach interface failed, port_id=413ced3b-b54d-4b64-93a1-7a9b2b9857fc, reason: Instance c439fe86-fc43-4c05-a4b7-3634a043269a could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 1541.395596] env[63379]: DEBUG nova.network.neutron [req-ea417a70-a77f-45c5-b9a0-4ef8eba84cb1 req-752126db-7be9-4df3-9758-1b4506c24180 service nova] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Updated VIF entry in instance network info cache for port 01134024-43f6-41eb-b222-1e69cef1bfd4. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1541.396012] env[63379]: DEBUG nova.network.neutron [req-ea417a70-a77f-45c5-b9a0-4ef8eba84cb1 req-752126db-7be9-4df3-9758-1b4506c24180 service nova] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Updating instance_info_cache with network_info: [{"id": "01134024-43f6-41eb-b222-1e69cef1bfd4", "address": "fa:16:3e:25:e3:d2", "network": {"id": "ddbc3cba-6a78-4455-89dd-2b790241675e", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1612069245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e17ea72d033544159bbaea7365a7f221", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "76e60ff4-204c-4f48-bd0e-2d5fa0a812ef", "external-id": "nsx-vlan-transportzone-854", "segmentation_id": 854, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01134024-43", "ovs_interfaceid": "01134024-43f6-41eb-b222-1e69cef1bfd4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1541.424673] env[63379]: DEBUG nova.compute.manager [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1541.460482] env[63379]: INFO nova.compute.manager [-] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Took 1.33 seconds to deallocate network for instance. 
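Interleaved with the teardown above are the oslo.concurrency lock messages ('Lock "compute_resources" acquired by ... waited Ns', '... "released" ... held Ns'): the resource tracker serializes instance claims and usage updates under a single named lock, and the waited/held durations in the log are reported by the decorator's wrapper. Below is a minimal sketch of that locking pattern, assuming lockutils' decorator API; ToyResourceTracker and its methods are hypothetical stand-ins for illustration, not nova's ResourceTracker.

# Illustrative sketch of the named-lock pattern behind the
# 'Lock "compute_resources" acquired/released' entries. Hypothetical class.
from oslo_concurrency import lockutils


class ToyResourceTracker:
    """Hypothetical stand-in for the resource tracker, illustration only."""

    def __init__(self):
        self.claims = []

    @lockutils.synchronized('compute_resources')
    def instance_claim(self, instance_uuid, vcpus, memory_mb):
        # Runs with the "compute_resources" lock held, so concurrent builds
        # cannot double-book the same inventory.
        self.claims.append((instance_uuid, vcpus, memory_mb))
        return self.claims[-1]

    @lockutils.synchronized('compute_resources')
    def update_usage(self, instance_uuid):
        # Takes the same lock, which is why update_usage appears in the log
        # as waiting (sometimes tens of seconds) while a claim or a periodic
        # resource update holds it.
        return [c for c in self.claims if c[0] == instance_uuid]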
[ 1541.517310] env[63379]: DEBUG oslo_concurrency.lockutils [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Acquiring lock "refresh_cache-650d4709-3cbc-4b9a-b165-66fa0af97c4d" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1541.517310] env[63379]: DEBUG oslo_concurrency.lockutils [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Acquired lock "refresh_cache-650d4709-3cbc-4b9a-b165-66fa0af97c4d" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1541.517434] env[63379]: DEBUG nova.network.neutron [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1541.633931] env[63379]: DEBUG nova.network.neutron [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Successfully created port: 9c772f89-9b5d-4518-ac94-8d61ecb706db {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1541.698811] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: ae565930-1bbc-4e75-bfc1-25dbcfd2e999] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1541.901365] env[63379]: DEBUG oslo_concurrency.lockutils [req-ea417a70-a77f-45c5-b9a0-4ef8eba84cb1 req-752126db-7be9-4df3-9758-1b4506c24180 service nova] Releasing lock "refresh_cache-915aec20-5765-4aad-8b0f-f2d71b7d6428" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1541.968761] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2778ed40-f1eb-40d9-8eaa-7b5c1add45a1 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1542.004451] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-110893aa-245f-4b6e-a748-2c6db0c5f9e7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.015999] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3359bb5-5b8f-4f8e-be0c-29a80b293c77 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.051604] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d54e321-81cd-427e-86b6-3414934881a2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.060169] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a57d4a9f-5ebb-45d5-af58-30fd3d99af5a 
{{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.075030] env[63379]: DEBUG nova.compute.provider_tree [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1542.083303] env[63379]: DEBUG nova.network.neutron [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1542.200679] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 5eb7cb5b-3ebf-4fc3-a34a-0c8cc22714fd] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1542.345110] env[63379]: DEBUG nova.network.neutron [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Updating instance_info_cache with network_info: [{"id": "6af296da-afd1-49bb-b790-5fb012d68a2c", "address": "fa:16:3e:22:f6:63", "network": {"id": "5a939869-22a0-4da5-bcaf-aadd175ade15", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1780017973-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbe6c82fa6ad4d7aa2e920bb4d17d5e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a31c4b8-5b72-4f32-aab3-c4e963e684dd", "external-id": "nsx-vlan-transportzone-805", "segmentation_id": 805, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6af296da-af", "ovs_interfaceid": "6af296da-afd1-49bb-b790-5fb012d68a2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1542.442432] env[63379]: DEBUG nova.compute.manager [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1542.472415] env[63379]: DEBUG nova.virt.hardware [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1542.473533] env[63379]: DEBUG nova.virt.hardware [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1542.473533] env[63379]: DEBUG nova.virt.hardware [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1542.473663] env[63379]: DEBUG nova.virt.hardware [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1542.473866] env[63379]: DEBUG nova.virt.hardware [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1542.474176] env[63379]: DEBUG nova.virt.hardware [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1542.474533] env[63379]: DEBUG nova.virt.hardware [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1542.474803] env[63379]: DEBUG nova.virt.hardware [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1542.475116] env[63379]: DEBUG 
nova.virt.hardware [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1542.475450] env[63379]: DEBUG nova.virt.hardware [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1542.475934] env[63379]: DEBUG nova.virt.hardware [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1542.477719] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e3b82d3-17fc-466f-b560-1e451ef61981 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.488764] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3556326d-ad94-4140-a651-430a716a4ab7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.616439] env[63379]: DEBUG nova.scheduler.client.report [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Updated inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 with generation 70 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1542.616968] env[63379]: DEBUG nova.compute.provider_tree [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Updating resource provider cf478c89-515f-4372-b90f-4868ab56e978 generation from 70 to 71 during operation: update_inventory {{(pid=63379) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1542.617914] env[63379]: DEBUG nova.compute.provider_tree [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1542.704376] 
env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 30908171-e1b9-4e20-830e-419ff6d9a0fa] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1542.799414] env[63379]: DEBUG nova.compute.manager [req-bd7bbcf2-48af-488f-9505-a2852facbf44 req-f55522ee-fe9c-4f68-bab0-7effdb1c7725 service nova] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Received event network-changed-3c22cde0-746e-43ec-b075-e14c004043c4 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1542.800399] env[63379]: DEBUG nova.compute.manager [req-bd7bbcf2-48af-488f-9505-a2852facbf44 req-f55522ee-fe9c-4f68-bab0-7effdb1c7725 service nova] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Refreshing instance network info cache due to event network-changed-3c22cde0-746e-43ec-b075-e14c004043c4. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1542.800774] env[63379]: DEBUG oslo_concurrency.lockutils [req-bd7bbcf2-48af-488f-9505-a2852facbf44 req-f55522ee-fe9c-4f68-bab0-7effdb1c7725 service nova] Acquiring lock "refresh_cache-318355e9-b4cc-4645-ac51-b583d14e1134" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1542.801015] env[63379]: DEBUG oslo_concurrency.lockutils [req-bd7bbcf2-48af-488f-9505-a2852facbf44 req-f55522ee-fe9c-4f68-bab0-7effdb1c7725 service nova] Acquired lock "refresh_cache-318355e9-b4cc-4645-ac51-b583d14e1134" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1542.801287] env[63379]: DEBUG nova.network.neutron [req-bd7bbcf2-48af-488f-9505-a2852facbf44 req-f55522ee-fe9c-4f68-bab0-7effdb1c7725 service nova] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Refreshing network info cache for port 3c22cde0-746e-43ec-b075-e14c004043c4 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1542.847978] env[63379]: DEBUG oslo_concurrency.lockutils [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Releasing lock "refresh_cache-650d4709-3cbc-4b9a-b165-66fa0af97c4d" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1542.848349] env[63379]: DEBUG nova.compute.manager [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Instance network_info: |[{"id": "6af296da-afd1-49bb-b790-5fb012d68a2c", "address": "fa:16:3e:22:f6:63", "network": {"id": "5a939869-22a0-4da5-bcaf-aadd175ade15", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1780017973-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbe6c82fa6ad4d7aa2e920bb4d17d5e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a31c4b8-5b72-4f32-aab3-c4e963e684dd", "external-id": 
"nsx-vlan-transportzone-805", "segmentation_id": 805, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6af296da-af", "ovs_interfaceid": "6af296da-afd1-49bb-b790-5fb012d68a2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1542.849148] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:22:f6:63', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8a31c4b8-5b72-4f32-aab3-c4e963e684dd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6af296da-afd1-49bb-b790-5fb012d68a2c', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1542.857937] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Creating folder: Project (dbe6c82fa6ad4d7aa2e920bb4d17d5e0). Parent ref: group-v369214. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1542.857937] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4f17add0-3f03-43bf-8439-f84ffc4aacb0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.872376] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Created folder: Project (dbe6c82fa6ad4d7aa2e920bb4d17d5e0) in parent group-v369214. [ 1542.872792] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Creating folder: Instances. Parent ref: group-v369345. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1542.872792] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1a9d2c45-9670-4279-a235-13f3780e5e8c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.884750] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Created folder: Instances in parent group-v369345. [ 1542.885041] env[63379]: DEBUG oslo.service.loopingcall [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1542.887105] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1542.887105] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9cc42558-3c8f-4b12-9772-17ddea6fae6e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.909023] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1542.909023] env[63379]: value = "task-1779353" [ 1542.909023] env[63379]: _type = "Task" [ 1542.909023] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.916382] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779353, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.124103] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.719s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1543.125218] env[63379]: DEBUG nova.compute.manager [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] [instance: 158fe346-93f5-422b-877a-8423547da58f] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1543.130091] env[63379]: DEBUG oslo_concurrency.lockutils [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.909s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1543.132934] env[63379]: INFO nova.compute.claims [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1543.208672] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 55fb6899-0321-4bf2-bf3f-2e87dd479433] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1543.419907] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779353, 'name': CreateVM_Task, 'duration_secs': 0.395944} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1543.421960] env[63379]: DEBUG oslo_vmware.rw_handles [None req-cb38d75b-9388-4dee-be85-b7c91f68048e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524edc75-8d0b-90c3-e543-42446e11ac3b/disk-0.vmdk. 
{{(pid=63379) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1543.422421] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1543.423255] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ad0f1b3-f6fd-448e-bf4e-c141b6c9e0a5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.426222] env[63379]: DEBUG oslo_concurrency.lockutils [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1543.426451] env[63379]: DEBUG oslo_concurrency.lockutils [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1543.426727] env[63379]: DEBUG oslo_concurrency.lockutils [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1543.427021] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff76a3b3-14ed-4d2e-8de2-c82c61c2c74b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.432253] env[63379]: DEBUG oslo_vmware.api [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Waiting for the task: (returnval){ [ 1543.432253] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52aa8f6c-5aa4-c489-8c84-a11ef3dd61d7" [ 1543.432253] env[63379]: _type = "Task" [ 1543.432253] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1543.434331] env[63379]: DEBUG oslo_vmware.rw_handles [None req-cb38d75b-9388-4dee-be85-b7c91f68048e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524edc75-8d0b-90c3-e543-42446e11ac3b/disk-0.vmdk is in state: ready. {{(pid=63379) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1543.434515] env[63379]: ERROR oslo_vmware.rw_handles [None req-cb38d75b-9388-4dee-be85-b7c91f68048e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524edc75-8d0b-90c3-e543-42446e11ac3b/disk-0.vmdk due to incomplete transfer. 
[ 1543.438456] env[63379]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-0482d6da-492f-4241-8115-80fa160f021c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.446889] env[63379]: DEBUG oslo_vmware.api [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52aa8f6c-5aa4-c489-8c84-a11ef3dd61d7, 'name': SearchDatastore_Task, 'duration_secs': 0.011472} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1543.447602] env[63379]: DEBUG oslo_concurrency.lockutils [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1543.448121] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1543.448121] env[63379]: DEBUG oslo_concurrency.lockutils [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1543.448259] env[63379]: DEBUG oslo_concurrency.lockutils [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1543.448460] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1543.448741] env[63379]: DEBUG oslo_vmware.rw_handles [None req-cb38d75b-9388-4dee-be85-b7c91f68048e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/524edc75-8d0b-90c3-e543-42446e11ac3b/disk-0.vmdk. 
{{(pid=63379) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1543.448913] env[63379]: DEBUG nova.virt.vmwareapi.images [None req-cb38d75b-9388-4dee-be85-b7c91f68048e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Uploaded image 3fd3ca89-fbce-4de3-ba04-2722a2b1c484 to the Glance image server {{(pid=63379) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1543.450964] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb38d75b-9388-4dee-be85-b7c91f68048e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Destroying the VM {{(pid=63379) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1543.451226] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-80826979-4dce-4286-8c07-35de2c044bf1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.452977] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-5a7cc74b-4a1b-46bf-a925-ee9b4f56cc48 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.460556] env[63379]: DEBUG oslo_vmware.api [None req-cb38d75b-9388-4dee-be85-b7c91f68048e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Waiting for the task: (returnval){ [ 1543.460556] env[63379]: value = "task-1779354" [ 1543.460556] env[63379]: _type = "Task" [ 1543.460556] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1543.465909] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1543.465909] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1543.466405] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fbf6f282-4a93-475d-8ccb-5dc27f9c25a5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.471389] env[63379]: DEBUG oslo_vmware.api [None req-cb38d75b-9388-4dee-be85-b7c91f68048e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779354, 'name': Destroy_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.474789] env[63379]: DEBUG oslo_vmware.api [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Waiting for the task: (returnval){ [ 1543.474789] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52ef9626-a95e-242e-54b2-858363774bfd" [ 1543.474789] env[63379]: _type = "Task" [ 1543.474789] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1543.483284] env[63379]: DEBUG oslo_vmware.api [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52ef9626-a95e-242e-54b2-858363774bfd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.642423] env[63379]: DEBUG nova.compute.utils [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1543.643964] env[63379]: DEBUG nova.compute.manager [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] [instance: 158fe346-93f5-422b-877a-8423547da58f] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1543.644160] env[63379]: DEBUG nova.network.neutron [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] [instance: 158fe346-93f5-422b-877a-8423547da58f] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1543.712332] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: a6f7c217-a493-403d-b776-870df4575f2a] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1543.752338] env[63379]: DEBUG nova.policy [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e3c83a210c114418b691aca3e1afb315', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '692581dc5dda4b3b94565dadcd06ec38', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1543.786180] env[63379]: DEBUG oslo_concurrency.lockutils [None req-627eb219-2e6b-4bb3-9e67-d7025775807b tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Acquiring lock "318355e9-b4cc-4645-ac51-b583d14e1134" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1543.786501] env[63379]: DEBUG oslo_concurrency.lockutils [None req-627eb219-2e6b-4bb3-9e67-d7025775807b tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Lock "318355e9-b4cc-4645-ac51-b583d14e1134" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1543.786714] env[63379]: DEBUG oslo_concurrency.lockutils [None req-627eb219-2e6b-4bb3-9e67-d7025775807b tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Acquiring lock "318355e9-b4cc-4645-ac51-b583d14e1134-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1543.786900] env[63379]: DEBUG oslo_concurrency.lockutils [None req-627eb219-2e6b-4bb3-9e67-d7025775807b tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Lock "318355e9-b4cc-4645-ac51-b583d14e1134-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1543.787087] env[63379]: DEBUG oslo_concurrency.lockutils [None req-627eb219-2e6b-4bb3-9e67-d7025775807b tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Lock "318355e9-b4cc-4645-ac51-b583d14e1134-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1543.789187] env[63379]: INFO nova.compute.manager [None req-627eb219-2e6b-4bb3-9e67-d7025775807b tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Terminating instance [ 1543.793742] env[63379]: DEBUG nova.compute.manager [None req-627eb219-2e6b-4bb3-9e67-d7025775807b tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1543.795483] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-627eb219-2e6b-4bb3-9e67-d7025775807b tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1543.798500] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9991e4dd-878c-489a-93f2-f566a340b2eb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.806313] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-627eb219-2e6b-4bb3-9e67-d7025775807b tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1543.806586] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5c1e4a1b-1485-438d-b309-e37f1114460e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.813829] env[63379]: DEBUG oslo_vmware.api [None req-627eb219-2e6b-4bb3-9e67-d7025775807b tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Waiting for the task: (returnval){ [ 1543.813829] env[63379]: value = "task-1779355" [ 1543.813829] env[63379]: _type = "Task" [ 1543.813829] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1543.825230] env[63379]: DEBUG oslo_vmware.api [None req-627eb219-2e6b-4bb3-9e67-d7025775807b tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': task-1779355, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.973662] env[63379]: DEBUG oslo_vmware.api [None req-cb38d75b-9388-4dee-be85-b7c91f68048e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779354, 'name': Destroy_Task, 'duration_secs': 0.356585} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1543.974097] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-cb38d75b-9388-4dee-be85-b7c91f68048e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Destroyed the VM [ 1543.974286] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cb38d75b-9388-4dee-be85-b7c91f68048e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Deleting Snapshot of the VM instance {{(pid=63379) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1543.974552] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-7efc480c-8cb8-48e3-83bb-9010998347c7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.992329] env[63379]: DEBUG oslo_vmware.api [None req-cb38d75b-9388-4dee-be85-b7c91f68048e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Waiting for the task: (returnval){ [ 1543.992329] env[63379]: value = "task-1779356" [ 1543.992329] env[63379]: _type = "Task" [ 1543.992329] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1543.993698] env[63379]: DEBUG nova.compute.manager [req-d335c195-b470-4e97-8f50-308bddf8d922 req-da47825d-0ee0-4deb-b849-f149eecaddd3 service nova] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Received event network-vif-plugged-9c772f89-9b5d-4518-ac94-8d61ecb706db {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1543.993927] env[63379]: DEBUG oslo_concurrency.lockutils [req-d335c195-b470-4e97-8f50-308bddf8d922 req-da47825d-0ee0-4deb-b849-f149eecaddd3 service nova] Acquiring lock "5aa36799-251b-4933-8ccd-8125995b1f8b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1543.994107] env[63379]: DEBUG oslo_concurrency.lockutils [req-d335c195-b470-4e97-8f50-308bddf8d922 req-da47825d-0ee0-4deb-b849-f149eecaddd3 service nova] Lock "5aa36799-251b-4933-8ccd-8125995b1f8b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1543.994290] env[63379]: DEBUG oslo_concurrency.lockutils [req-d335c195-b470-4e97-8f50-308bddf8d922 req-da47825d-0ee0-4deb-b849-f149eecaddd3 service nova] Lock "5aa36799-251b-4933-8ccd-8125995b1f8b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1543.994506] env[63379]: DEBUG nova.compute.manager [req-d335c195-b470-4e97-8f50-308bddf8d922 req-da47825d-0ee0-4deb-b849-f149eecaddd3 service nova] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] No waiting events found dispatching network-vif-plugged-9c772f89-9b5d-4518-ac94-8d61ecb706db {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1543.994682] env[63379]: WARNING nova.compute.manager [req-d335c195-b470-4e97-8f50-308bddf8d922 
req-da47825d-0ee0-4deb-b849-f149eecaddd3 service nova] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Received unexpected event network-vif-plugged-9c772f89-9b5d-4518-ac94-8d61ecb706db for instance with vm_state building and task_state spawning. [ 1543.995795] env[63379]: DEBUG oslo_vmware.api [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52ef9626-a95e-242e-54b2-858363774bfd, 'name': SearchDatastore_Task, 'duration_secs': 0.009139} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.001350] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea7294bb-28e0-4234-aeb2-f5afbd95724e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.010916] env[63379]: DEBUG oslo_vmware.api [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Waiting for the task: (returnval){ [ 1544.010916] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a41d66-d1a6-97ed-313b-7c60016d5915" [ 1544.010916] env[63379]: _type = "Task" [ 1544.010916] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.018880] env[63379]: DEBUG oslo_vmware.api [None req-cb38d75b-9388-4dee-be85-b7c91f68048e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779356, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.026694] env[63379]: DEBUG nova.network.neutron [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Successfully updated port: 9c772f89-9b5d-4518-ac94-8d61ecb706db {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1544.035019] env[63379]: DEBUG oslo_vmware.api [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a41d66-d1a6-97ed-313b-7c60016d5915, 'name': SearchDatastore_Task, 'duration_secs': 0.01047} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.035019] env[63379]: DEBUG oslo_concurrency.lockutils [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1544.035019] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 650d4709-3cbc-4b9a-b165-66fa0af97c4d/650d4709-3cbc-4b9a-b165-66fa0af97c4d.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1544.035019] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4ee9534a-a5a7-4ab1-911e-c3ddf439c81f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.041455] env[63379]: DEBUG oslo_vmware.api [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Waiting for the task: (returnval){ [ 1544.041455] env[63379]: value = "task-1779357" [ 1544.041455] env[63379]: _type = "Task" [ 1544.041455] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.051999] env[63379]: DEBUG oslo_vmware.api [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Task: {'id': task-1779357, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.088806] env[63379]: DEBUG nova.network.neutron [req-bd7bbcf2-48af-488f-9505-a2852facbf44 req-f55522ee-fe9c-4f68-bab0-7effdb1c7725 service nova] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Updated VIF entry in instance network info cache for port 3c22cde0-746e-43ec-b075-e14c004043c4. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1544.088806] env[63379]: DEBUG nova.network.neutron [req-bd7bbcf2-48af-488f-9505-a2852facbf44 req-f55522ee-fe9c-4f68-bab0-7effdb1c7725 service nova] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Updating instance_info_cache with network_info: [{"id": "3c22cde0-746e-43ec-b075-e14c004043c4", "address": "fa:16:3e:2b:70:a9", "network": {"id": "ddbc3cba-6a78-4455-89dd-2b790241675e", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1612069245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e17ea72d033544159bbaea7365a7f221", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "76e60ff4-204c-4f48-bd0e-2d5fa0a812ef", "external-id": "nsx-vlan-transportzone-854", "segmentation_id": 854, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c22cde0-74", "ovs_interfaceid": "3c22cde0-746e-43ec-b075-e14c004043c4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1544.150118] env[63379]: DEBUG nova.compute.manager [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] [instance: 158fe346-93f5-422b-877a-8423547da58f] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1544.215064] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 0edadcca-042e-440b-985b-6338e20265fa] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1544.329793] env[63379]: DEBUG oslo_vmware.api [None req-627eb219-2e6b-4bb3-9e67-d7025775807b tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': task-1779355, 'name': PowerOffVM_Task, 'duration_secs': 0.191775} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.333162] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-627eb219-2e6b-4bb3-9e67-d7025775807b tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1544.333473] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-627eb219-2e6b-4bb3-9e67-d7025775807b tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1544.334969] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-978cee32-b86f-4070-b960-15b1f01043fd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.431564] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-627eb219-2e6b-4bb3-9e67-d7025775807b tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1544.434023] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-627eb219-2e6b-4bb3-9e67-d7025775807b tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1544.434023] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-627eb219-2e6b-4bb3-9e67-d7025775807b tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Deleting the datastore file [datastore1] 318355e9-b4cc-4645-ac51-b583d14e1134 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1544.434023] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-01332f6c-ed3d-4099-aa9f-1219d5a7dec5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.442837] env[63379]: DEBUG oslo_vmware.api [None req-627eb219-2e6b-4bb3-9e67-d7025775807b tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Waiting for the task: (returnval){ [ 1544.442837] env[63379]: value = "task-1779359" [ 1544.442837] env[63379]: _type = "Task" [ 1544.442837] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.460513] env[63379]: DEBUG oslo_vmware.api [None req-627eb219-2e6b-4bb3-9e67-d7025775807b tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': task-1779359, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.508455] env[63379]: DEBUG oslo_vmware.api [None req-cb38d75b-9388-4dee-be85-b7c91f68048e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779356, 'name': RemoveSnapshot_Task} progress is 30%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.527159] env[63379]: DEBUG oslo_concurrency.lockutils [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Acquiring lock "refresh_cache-5aa36799-251b-4933-8ccd-8125995b1f8b" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1544.527374] env[63379]: DEBUG oslo_concurrency.lockutils [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Acquired lock "refresh_cache-5aa36799-251b-4933-8ccd-8125995b1f8b" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1544.527592] env[63379]: DEBUG nova.network.neutron [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1544.535808] env[63379]: DEBUG nova.network.neutron [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] [instance: 158fe346-93f5-422b-877a-8423547da58f] Successfully created port: ce8dbca6-e4fa-47a3-b501-18973a50219c {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1544.556670] env[63379]: DEBUG oslo_vmware.api [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Task: {'id': task-1779357, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.494368} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.556961] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 650d4709-3cbc-4b9a-b165-66fa0af97c4d/650d4709-3cbc-4b9a-b165-66fa0af97c4d.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1544.557203] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1544.557474] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a115c607-dc07-4522-b643-576ce260e012 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.567960] env[63379]: DEBUG oslo_vmware.api [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Waiting for the task: (returnval){ [ 1544.567960] env[63379]: value = "task-1779360" [ 1544.567960] env[63379]: _type = "Task" [ 1544.567960] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.577890] env[63379]: DEBUG oslo_vmware.api [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Task: {'id': task-1779360, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.592911] env[63379]: DEBUG oslo_concurrency.lockutils [req-bd7bbcf2-48af-488f-9505-a2852facbf44 req-f55522ee-fe9c-4f68-bab0-7effdb1c7725 service nova] Releasing lock "refresh_cache-318355e9-b4cc-4645-ac51-b583d14e1134" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1544.593307] env[63379]: DEBUG nova.compute.manager [req-bd7bbcf2-48af-488f-9505-a2852facbf44 req-f55522ee-fe9c-4f68-bab0-7effdb1c7725 service nova] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Received event network-vif-plugged-6af296da-afd1-49bb-b790-5fb012d68a2c {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1544.593562] env[63379]: DEBUG oslo_concurrency.lockutils [req-bd7bbcf2-48af-488f-9505-a2852facbf44 req-f55522ee-fe9c-4f68-bab0-7effdb1c7725 service nova] Acquiring lock "650d4709-3cbc-4b9a-b165-66fa0af97c4d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1544.593824] env[63379]: DEBUG oslo_concurrency.lockutils [req-bd7bbcf2-48af-488f-9505-a2852facbf44 req-f55522ee-fe9c-4f68-bab0-7effdb1c7725 service nova] Lock "650d4709-3cbc-4b9a-b165-66fa0af97c4d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1544.594137] env[63379]: DEBUG oslo_concurrency.lockutils [req-bd7bbcf2-48af-488f-9505-a2852facbf44 req-f55522ee-fe9c-4f68-bab0-7effdb1c7725 service nova] Lock "650d4709-3cbc-4b9a-b165-66fa0af97c4d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1544.594372] env[63379]: DEBUG nova.compute.manager [req-bd7bbcf2-48af-488f-9505-a2852facbf44 req-f55522ee-fe9c-4f68-bab0-7effdb1c7725 service nova] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] No waiting events found dispatching network-vif-plugged-6af296da-afd1-49bb-b790-5fb012d68a2c {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1544.594612] env[63379]: WARNING nova.compute.manager [req-bd7bbcf2-48af-488f-9505-a2852facbf44 req-f55522ee-fe9c-4f68-bab0-7effdb1c7725 service nova] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Received unexpected event network-vif-plugged-6af296da-afd1-49bb-b790-5fb012d68a2c for instance with vm_state building and task_state spawning. [ 1544.594838] env[63379]: DEBUG nova.compute.manager [req-bd7bbcf2-48af-488f-9505-a2852facbf44 req-f55522ee-fe9c-4f68-bab0-7effdb1c7725 service nova] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Received event network-changed-6af296da-afd1-49bb-b790-5fb012d68a2c {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1544.595060] env[63379]: DEBUG nova.compute.manager [req-bd7bbcf2-48af-488f-9505-a2852facbf44 req-f55522ee-fe9c-4f68-bab0-7effdb1c7725 service nova] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Refreshing instance network info cache due to event network-changed-6af296da-afd1-49bb-b790-5fb012d68a2c. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1544.595375] env[63379]: DEBUG oslo_concurrency.lockutils [req-bd7bbcf2-48af-488f-9505-a2852facbf44 req-f55522ee-fe9c-4f68-bab0-7effdb1c7725 service nova] Acquiring lock "refresh_cache-650d4709-3cbc-4b9a-b165-66fa0af97c4d" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1544.595568] env[63379]: DEBUG oslo_concurrency.lockutils [req-bd7bbcf2-48af-488f-9505-a2852facbf44 req-f55522ee-fe9c-4f68-bab0-7effdb1c7725 service nova] Acquired lock "refresh_cache-650d4709-3cbc-4b9a-b165-66fa0af97c4d" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1544.595767] env[63379]: DEBUG nova.network.neutron [req-bd7bbcf2-48af-488f-9505-a2852facbf44 req-f55522ee-fe9c-4f68-bab0-7effdb1c7725 service nova] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Refreshing network info cache for port 6af296da-afd1-49bb-b790-5fb012d68a2c {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1544.720478] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: efc5b3b6-bed4-484c-8a0c-65810747382d] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1544.788124] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57a96575-445a-46d9-af71-bac698289669 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.796988] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c0c153a-eaf6-4f0e-8b7c-27dc6fd740a2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.834968] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dec5b21-e473-492e-ada4-0083003e0558 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.846339] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d11d69a-9e7a-477a-ba32-36c9e6096b84 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.866618] env[63379]: DEBUG nova.compute.provider_tree [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1544.958464] env[63379]: DEBUG oslo_vmware.api [None req-627eb219-2e6b-4bb3-9e67-d7025775807b tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': task-1779359, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.203172} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.959738] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-627eb219-2e6b-4bb3-9e67-d7025775807b tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1544.959738] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-627eb219-2e6b-4bb3-9e67-d7025775807b tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1544.959738] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-627eb219-2e6b-4bb3-9e67-d7025775807b tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1544.959738] env[63379]: INFO nova.compute.manager [None req-627eb219-2e6b-4bb3-9e67-d7025775807b tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1544.959738] env[63379]: DEBUG oslo.service.loopingcall [None req-627eb219-2e6b-4bb3-9e67-d7025775807b tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1544.960156] env[63379]: DEBUG nova.compute.manager [-] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1544.960156] env[63379]: DEBUG nova.network.neutron [-] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1545.008438] env[63379]: DEBUG oslo_vmware.api [None req-cb38d75b-9388-4dee-be85-b7c91f68048e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779356, 'name': RemoveSnapshot_Task, 'duration_secs': 0.750232} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1545.008873] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cb38d75b-9388-4dee-be85-b7c91f68048e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Deleted Snapshot of the VM instance {{(pid=63379) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1545.009158] env[63379]: INFO nova.compute.manager [None req-cb38d75b-9388-4dee-be85-b7c91f68048e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Took 14.75 seconds to snapshot the instance on the hypervisor. 
[ 1545.084455] env[63379]: DEBUG oslo_vmware.api [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Task: {'id': task-1779360, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071951} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1545.084455] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1545.085337] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcd793fb-d2ba-473d-aea5-001d04219886 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.118902] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Reconfiguring VM instance instance-0000002d to attach disk [datastore1] 650d4709-3cbc-4b9a-b165-66fa0af97c4d/650d4709-3cbc-4b9a-b165-66fa0af97c4d.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1545.119193] env[63379]: DEBUG nova.network.neutron [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1545.121336] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3dff1b6a-bba5-40a9-b90f-5810e05de92f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.147422] env[63379]: DEBUG oslo_vmware.api [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Waiting for the task: (returnval){ [ 1545.147422] env[63379]: value = "task-1779361" [ 1545.147422] env[63379]: _type = "Task" [ 1545.147422] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1545.157116] env[63379]: DEBUG oslo_vmware.api [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Task: {'id': task-1779361, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.167122] env[63379]: DEBUG nova.compute.manager [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] [instance: 158fe346-93f5-422b-877a-8423547da58f] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1545.168293] env[63379]: DEBUG nova.compute.manager [req-9fca9a62-a949-43cd-b6fe-b20be412c91e req-c54ccc71-be2f-46f5-adcd-cffcab447699 service nova] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Received event network-changed-3c22cde0-746e-43ec-b075-e14c004043c4 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1545.168522] env[63379]: DEBUG nova.compute.manager [req-9fca9a62-a949-43cd-b6fe-b20be412c91e req-c54ccc71-be2f-46f5-adcd-cffcab447699 service nova] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Refreshing instance network info cache due to event network-changed-3c22cde0-746e-43ec-b075-e14c004043c4. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1545.168790] env[63379]: DEBUG oslo_concurrency.lockutils [req-9fca9a62-a949-43cd-b6fe-b20be412c91e req-c54ccc71-be2f-46f5-adcd-cffcab447699 service nova] Acquiring lock "refresh_cache-318355e9-b4cc-4645-ac51-b583d14e1134" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1545.168947] env[63379]: DEBUG oslo_concurrency.lockutils [req-9fca9a62-a949-43cd-b6fe-b20be412c91e req-c54ccc71-be2f-46f5-adcd-cffcab447699 service nova] Acquired lock "refresh_cache-318355e9-b4cc-4645-ac51-b583d14e1134" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1545.169158] env[63379]: DEBUG nova.network.neutron [req-9fca9a62-a949-43cd-b6fe-b20be412c91e req-c54ccc71-be2f-46f5-adcd-cffcab447699 service nova] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Refreshing network info cache for port 3c22cde0-746e-43ec-b075-e14c004043c4 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1545.197645] env[63379]: DEBUG nova.virt.hardware [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1545.197929] env[63379]: DEBUG nova.virt.hardware [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1545.198106] env[63379]: DEBUG nova.virt.hardware [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 1545.198303] env[63379]: DEBUG nova.virt.hardware [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1545.198465] env[63379]: DEBUG nova.virt.hardware [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1545.198622] env[63379]: DEBUG nova.virt.hardware [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1545.198831] env[63379]: DEBUG nova.virt.hardware [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1545.199642] env[63379]: DEBUG nova.virt.hardware [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1545.199642] env[63379]: DEBUG nova.virt.hardware [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1545.199642] env[63379]: DEBUG nova.virt.hardware [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1545.199642] env[63379]: DEBUG nova.virt.hardware [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1545.200404] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe571289-3111-4d45-8579-6ed70dcdb911 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.210030] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8a1ff21-bd40-4e14-ad42-e0e82b32388a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.225425] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] 
[instance: c7da2ae8-5e09-43d0-9cf5-f926b47fbc0c] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1545.372283] env[63379]: DEBUG nova.scheduler.client.report [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1545.660815] env[63379]: DEBUG oslo_vmware.api [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Task: {'id': task-1779361, 'name': ReconfigVM_Task, 'duration_secs': 0.280234} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1545.661188] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Reconfigured VM instance instance-0000002d to attach disk [datastore1] 650d4709-3cbc-4b9a-b165-66fa0af97c4d/650d4709-3cbc-4b9a-b165-66fa0af97c4d.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1545.663768] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c66535cc-31fa-4860-a6f3-ad7dd66b9a08 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.671287] env[63379]: DEBUG oslo_vmware.api [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Waiting for the task: (returnval){ [ 1545.671287] env[63379]: value = "task-1779362" [ 1545.671287] env[63379]: _type = "Task" [ 1545.671287] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1545.683174] env[63379]: DEBUG oslo_vmware.api [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Task: {'id': task-1779362, 'name': Rename_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.699509] env[63379]: INFO nova.network.neutron [req-9fca9a62-a949-43cd-b6fe-b20be412c91e req-c54ccc71-be2f-46f5-adcd-cffcab447699 service nova] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Port 3c22cde0-746e-43ec-b075-e14c004043c4 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1545.699509] env[63379]: DEBUG nova.network.neutron [req-9fca9a62-a949-43cd-b6fe-b20be412c91e req-c54ccc71-be2f-46f5-adcd-cffcab447699 service nova] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1545.730243] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 724c7a22-1833-4dc5-ab38-a11498a83ab8] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1545.731538] env[63379]: DEBUG nova.network.neutron [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Updating instance_info_cache with network_info: [{"id": "9c772f89-9b5d-4518-ac94-8d61ecb706db", "address": "fa:16:3e:32:7b:ad", "network": {"id": "d0d8817a-9a4d-408c-8345-f1802a107932", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1328153562-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17efc8d00fd341d794e0458aaac75ea1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68add7d6-c025-46fa-84d3-9c589adb63e4", "external-id": "nsx-vlan-transportzone-961", "segmentation_id": 961, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c772f89-9b", "ovs_interfaceid": "9c772f89-9b5d-4518-ac94-8d61ecb706db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1545.877789] env[63379]: DEBUG oslo_concurrency.lockutils [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.748s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1545.880034] env[63379]: DEBUG nova.compute.manager [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1545.880972] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.615s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1545.883355] env[63379]: INFO nova.compute.claims [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1545.907426] env[63379]: DEBUG nova.network.neutron [req-bd7bbcf2-48af-488f-9505-a2852facbf44 req-f55522ee-fe9c-4f68-bab0-7effdb1c7725 service nova] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Updated VIF entry in instance network info cache for port 6af296da-afd1-49bb-b790-5fb012d68a2c. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1545.907873] env[63379]: DEBUG nova.network.neutron [req-bd7bbcf2-48af-488f-9505-a2852facbf44 req-f55522ee-fe9c-4f68-bab0-7effdb1c7725 service nova] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Updating instance_info_cache with network_info: [{"id": "6af296da-afd1-49bb-b790-5fb012d68a2c", "address": "fa:16:3e:22:f6:63", "network": {"id": "5a939869-22a0-4da5-bcaf-aadd175ade15", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1780017973-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbe6c82fa6ad4d7aa2e920bb4d17d5e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a31c4b8-5b72-4f32-aab3-c4e963e684dd", "external-id": "nsx-vlan-transportzone-805", "segmentation_id": 805, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6af296da-af", "ovs_interfaceid": "6af296da-afd1-49bb-b790-5fb012d68a2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1546.183667] env[63379]: DEBUG oslo_vmware.api [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Task: {'id': task-1779362, 'name': Rename_Task, 'duration_secs': 0.154432} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.184294] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1546.184629] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b1e5b684-b725-4ae8-a9b0-7043bd1a9744 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.192818] env[63379]: DEBUG oslo_vmware.api [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Waiting for the task: (returnval){ [ 1546.192818] env[63379]: value = "task-1779363" [ 1546.192818] env[63379]: _type = "Task" [ 1546.192818] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.203091] env[63379]: DEBUG oslo_concurrency.lockutils [req-9fca9a62-a949-43cd-b6fe-b20be412c91e req-c54ccc71-be2f-46f5-adcd-cffcab447699 service nova] Releasing lock "refresh_cache-318355e9-b4cc-4645-ac51-b583d14e1134" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1546.203520] env[63379]: DEBUG nova.compute.manager [req-9fca9a62-a949-43cd-b6fe-b20be412c91e req-c54ccc71-be2f-46f5-adcd-cffcab447699 service nova] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Received event network-changed-01134024-43f6-41eb-b222-1e69cef1bfd4 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1546.203744] env[63379]: DEBUG nova.compute.manager [req-9fca9a62-a949-43cd-b6fe-b20be412c91e req-c54ccc71-be2f-46f5-adcd-cffcab447699 service nova] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Refreshing instance network info cache due to event network-changed-01134024-43f6-41eb-b222-1e69cef1bfd4. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1546.204250] env[63379]: DEBUG oslo_concurrency.lockutils [req-9fca9a62-a949-43cd-b6fe-b20be412c91e req-c54ccc71-be2f-46f5-adcd-cffcab447699 service nova] Acquiring lock "refresh_cache-915aec20-5765-4aad-8b0f-f2d71b7d6428" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1546.204250] env[63379]: DEBUG oslo_concurrency.lockutils [req-9fca9a62-a949-43cd-b6fe-b20be412c91e req-c54ccc71-be2f-46f5-adcd-cffcab447699 service nova] Acquired lock "refresh_cache-915aec20-5765-4aad-8b0f-f2d71b7d6428" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1546.204346] env[63379]: DEBUG nova.network.neutron [req-9fca9a62-a949-43cd-b6fe-b20be412c91e req-c54ccc71-be2f-46f5-adcd-cffcab447699 service nova] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Refreshing network info cache for port 01134024-43f6-41eb-b222-1e69cef1bfd4 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1546.206208] env[63379]: DEBUG oslo_vmware.api [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Task: {'id': task-1779363, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.237914] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 0aab61e4-c055-4872-973a-20fa6802ec10] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1546.243425] env[63379]: DEBUG oslo_concurrency.lockutils [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Releasing lock "refresh_cache-5aa36799-251b-4933-8ccd-8125995b1f8b" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1546.243619] env[63379]: DEBUG nova.compute.manager [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Instance network_info: |[{"id": "9c772f89-9b5d-4518-ac94-8d61ecb706db", "address": "fa:16:3e:32:7b:ad", "network": {"id": "d0d8817a-9a4d-408c-8345-f1802a107932", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1328153562-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17efc8d00fd341d794e0458aaac75ea1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68add7d6-c025-46fa-84d3-9c589adb63e4", "external-id": "nsx-vlan-transportzone-961", "segmentation_id": 961, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c772f89-9b", "ovs_interfaceid": "9c772f89-9b5d-4518-ac94-8d61ecb706db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1546.247360] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:32:7b:ad', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '68add7d6-c025-46fa-84d3-9c589adb63e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9c772f89-9b5d-4518-ac94-8d61ecb706db', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1546.258637] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Creating folder: Project (17efc8d00fd341d794e0458aaac75ea1). Parent ref: group-v369214. 
{{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1546.259839] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b7261a35-1294-4814-a688-757c2353cd6f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.271370] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Created folder: Project (17efc8d00fd341d794e0458aaac75ea1) in parent group-v369214. [ 1546.271611] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Creating folder: Instances. Parent ref: group-v369348. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1546.272134] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7913d0a7-4ffd-45fe-92b7-5ab2c8e906e0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.286436] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Created folder: Instances in parent group-v369348. [ 1546.286436] env[63379]: DEBUG oslo.service.loopingcall [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1546.286436] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1546.286436] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5fe9b237-396d-4279-bfdb-57c0adc77918 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.304750] env[63379]: DEBUG nova.network.neutron [-] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1546.312616] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1546.312616] env[63379]: value = "task-1779366" [ 1546.312616] env[63379]: _type = "Task" [ 1546.312616] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.327882] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779366, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.390023] env[63379]: DEBUG nova.compute.utils [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1546.396149] env[63379]: DEBUG nova.compute.manager [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1546.396363] env[63379]: DEBUG nova.network.neutron [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1546.411184] env[63379]: DEBUG oslo_concurrency.lockutils [req-bd7bbcf2-48af-488f-9505-a2852facbf44 req-f55522ee-fe9c-4f68-bab0-7effdb1c7725 service nova] Releasing lock "refresh_cache-650d4709-3cbc-4b9a-b165-66fa0af97c4d" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1546.442472] env[63379]: DEBUG nova.policy [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9162483675d540dfb8551206627b50e7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '767980ba969142098ccbdf031f6e62a9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1546.521791] env[63379]: DEBUG nova.network.neutron [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] [instance: 158fe346-93f5-422b-877a-8423547da58f] Successfully updated port: ce8dbca6-e4fa-47a3-b501-18973a50219c {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1546.711961] env[63379]: DEBUG oslo_vmware.api [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Task: {'id': task-1779363, 'name': PowerOnVM_Task, 'duration_secs': 0.483331} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.711961] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1546.712303] env[63379]: INFO nova.compute.manager [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Took 6.93 seconds to spawn the instance on the hypervisor. [ 1546.713267] env[63379]: DEBUG nova.compute.manager [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1546.714144] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-541a5f44-b643-4f20-ad50-76186da726d5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.727921] env[63379]: DEBUG nova.compute.manager [req-dca2a9c7-1b44-465f-83a0-1b36f24637f2 req-8b3a6dc8-526b-41cd-9560-ada69c4e7caf service nova] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Received event network-changed-9c772f89-9b5d-4518-ac94-8d61ecb706db {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1546.728225] env[63379]: DEBUG nova.compute.manager [req-dca2a9c7-1b44-465f-83a0-1b36f24637f2 req-8b3a6dc8-526b-41cd-9560-ada69c4e7caf service nova] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Refreshing instance network info cache due to event network-changed-9c772f89-9b5d-4518-ac94-8d61ecb706db. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1546.728681] env[63379]: DEBUG oslo_concurrency.lockutils [req-dca2a9c7-1b44-465f-83a0-1b36f24637f2 req-8b3a6dc8-526b-41cd-9560-ada69c4e7caf service nova] Acquiring lock "refresh_cache-5aa36799-251b-4933-8ccd-8125995b1f8b" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1546.728681] env[63379]: DEBUG oslo_concurrency.lockutils [req-dca2a9c7-1b44-465f-83a0-1b36f24637f2 req-8b3a6dc8-526b-41cd-9560-ada69c4e7caf service nova] Acquired lock "refresh_cache-5aa36799-251b-4933-8ccd-8125995b1f8b" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1546.729670] env[63379]: DEBUG nova.network.neutron [req-dca2a9c7-1b44-465f-83a0-1b36f24637f2 req-8b3a6dc8-526b-41cd-9560-ada69c4e7caf service nova] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Refreshing network info cache for port 9c772f89-9b5d-4518-ac94-8d61ecb706db {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1546.741209] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1546.741479] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Cleaning up deleted instances with incomplete migration {{(pid=63379) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11312}} [ 1546.776214] env[63379]: DEBUG nova.network.neutron [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Successfully created port: fee236c0-9eaa-44e8-b51c-e97f6f003dad {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1546.809153] env[63379]: INFO nova.compute.manager [-] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Took 1.85 seconds to deallocate network for instance. [ 1546.831773] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779366, 'name': CreateVM_Task, 'duration_secs': 0.351608} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.831773] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1546.831773] env[63379]: DEBUG oslo_concurrency.lockutils [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1546.831773] env[63379]: DEBUG oslo_concurrency.lockutils [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1546.831773] env[63379]: DEBUG oslo_concurrency.lockutils [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1546.831773] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9adf700a-29e7-421c-82c8-81263564ecbf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.840995] env[63379]: DEBUG oslo_vmware.api [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Waiting for the task: (returnval){ [ 1546.840995] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]524945fd-f005-b7ba-c88c-a10ada16e9bd" [ 1546.840995] env[63379]: _type = "Task" [ 1546.840995] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.853937] env[63379]: DEBUG oslo_vmware.api [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]524945fd-f005-b7ba-c88c-a10ada16e9bd, 'name': SearchDatastore_Task, 'duration_secs': 0.010728} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.854821] env[63379]: DEBUG oslo_concurrency.lockutils [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1546.855075] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1546.855341] env[63379]: DEBUG oslo_concurrency.lockutils [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1546.855488] env[63379]: DEBUG oslo_concurrency.lockutils [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1546.855697] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1546.856803] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c3c4b8b4-9eba-432c-a4c9-f5d066c58c5e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.867351] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1546.867503] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1546.868324] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d3e759f-d725-4d50-958c-da3f5a70d3cf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.875166] env[63379]: DEBUG oslo_vmware.api [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Waiting for the task: (returnval){ [ 1546.875166] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]520acc46-1f30-9cd4-b755-cb2036c7d6b2" [ 1546.875166] env[63379]: _type = "Task" [ 1546.875166] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.884283] env[63379]: DEBUG oslo_vmware.api [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]520acc46-1f30-9cd4-b755-cb2036c7d6b2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.897091] env[63379]: DEBUG nova.compute.manager [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1547.024703] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Acquiring lock "refresh_cache-158fe346-93f5-422b-877a-8423547da58f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1547.026054] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Acquired lock "refresh_cache-158fe346-93f5-422b-877a-8423547da58f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1547.026054] env[63379]: DEBUG nova.network.neutron [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] [instance: 158fe346-93f5-422b-877a-8423547da58f] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1547.036551] env[63379]: DEBUG nova.network.neutron [req-9fca9a62-a949-43cd-b6fe-b20be412c91e req-c54ccc71-be2f-46f5-adcd-cffcab447699 service nova] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Updated VIF entry in instance network info cache for port 01134024-43f6-41eb-b222-1e69cef1bfd4. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1547.036960] env[63379]: DEBUG nova.network.neutron [req-9fca9a62-a949-43cd-b6fe-b20be412c91e req-c54ccc71-be2f-46f5-adcd-cffcab447699 service nova] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Updating instance_info_cache with network_info: [{"id": "01134024-43f6-41eb-b222-1e69cef1bfd4", "address": "fa:16:3e:25:e3:d2", "network": {"id": "ddbc3cba-6a78-4455-89dd-2b790241675e", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1612069245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e17ea72d033544159bbaea7365a7f221", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "76e60ff4-204c-4f48-bd0e-2d5fa0a812ef", "external-id": "nsx-vlan-transportzone-854", "segmentation_id": 854, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01134024-43", "ovs_interfaceid": "01134024-43f6-41eb-b222-1e69cef1bfd4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1547.236281] env[63379]: INFO nova.compute.manager [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Took 43.87 seconds to build instance. [ 1547.243633] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1547.322883] env[63379]: DEBUG oslo_concurrency.lockutils [None req-627eb219-2e6b-4bb3-9e67-d7025775807b tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1547.394406] env[63379]: DEBUG oslo_vmware.api [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]520acc46-1f30-9cd4-b755-cb2036c7d6b2, 'name': SearchDatastore_Task, 'duration_secs': 0.010531} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.397018] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef144be9-3f34-47cc-a075-667f6642ceb0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.402126] env[63379]: DEBUG oslo_vmware.api [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Waiting for the task: (returnval){ [ 1547.402126] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5234b7b7-3cac-5886-6c0e-55d06ab31c52" [ 1547.402126] env[63379]: _type = "Task" [ 1547.402126] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.418287] env[63379]: DEBUG oslo_vmware.api [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5234b7b7-3cac-5886-6c0e-55d06ab31c52, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.499657] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1254e65-e3ee-4e08-b215-993428c8dbed {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.509371] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b163af6d-f533-4c16-b79f-d53876669591 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.544962] env[63379]: DEBUG oslo_concurrency.lockutils [req-9fca9a62-a949-43cd-b6fe-b20be412c91e req-c54ccc71-be2f-46f5-adcd-cffcab447699 service nova] Releasing lock "refresh_cache-915aec20-5765-4aad-8b0f-f2d71b7d6428" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1547.550456] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92581517-e93c-4a99-ba96-76a8c3cb375e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.557806] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05dac600-6ba6-418f-b367-b5d919e903fb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.572776] env[63379]: DEBUG nova.compute.provider_tree [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1547.591039] env[63379]: DEBUG nova.network.neutron [req-dca2a9c7-1b44-465f-83a0-1b36f24637f2 req-8b3a6dc8-526b-41cd-9560-ada69c4e7caf service nova] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Updated VIF entry in instance network info cache for port 9c772f89-9b5d-4518-ac94-8d61ecb706db. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1547.592212] env[63379]: DEBUG nova.network.neutron [req-dca2a9c7-1b44-465f-83a0-1b36f24637f2 req-8b3a6dc8-526b-41cd-9560-ada69c4e7caf service nova] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Updating instance_info_cache with network_info: [{"id": "9c772f89-9b5d-4518-ac94-8d61ecb706db", "address": "fa:16:3e:32:7b:ad", "network": {"id": "d0d8817a-9a4d-408c-8345-f1802a107932", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1328153562-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17efc8d00fd341d794e0458aaac75ea1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68add7d6-c025-46fa-84d3-9c589adb63e4", "external-id": "nsx-vlan-transportzone-961", "segmentation_id": 961, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c772f89-9b", "ovs_interfaceid": "9c772f89-9b5d-4518-ac94-8d61ecb706db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1547.593401] env[63379]: DEBUG nova.network.neutron [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] [instance: 158fe346-93f5-422b-877a-8423547da58f] Instance cache missing network info. 
{{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1547.743854] env[63379]: DEBUG oslo_concurrency.lockutils [None req-42d05773-fe02-45e3-bba3-da73f9a60016 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Lock "650d4709-3cbc-4b9a-b165-66fa0af97c4d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.452s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1547.765834] env[63379]: DEBUG nova.network.neutron [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] [instance: 158fe346-93f5-422b-877a-8423547da58f] Updating instance_info_cache with network_info: [{"id": "ce8dbca6-e4fa-47a3-b501-18973a50219c", "address": "fa:16:3e:db:bf:0c", "network": {"id": "ed24d0da-9874-4940-bc5e-1aa29c68ce84", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-2101419788-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "692581dc5dda4b3b94565dadcd06ec38", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce8dbca6-e4", "ovs_interfaceid": "ce8dbca6-e4fa-47a3-b501-18973a50219c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1547.832080] env[63379]: DEBUG nova.compute.manager [req-0c439511-073e-47dd-9b1b-c4cd6ce9fbdd req-5d544c99-dc96-4f07-bf40-28d66c2aacfc service nova] [instance: 158fe346-93f5-422b-877a-8423547da58f] Received event network-vif-plugged-ce8dbca6-e4fa-47a3-b501-18973a50219c {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1547.832240] env[63379]: DEBUG oslo_concurrency.lockutils [req-0c439511-073e-47dd-9b1b-c4cd6ce9fbdd req-5d544c99-dc96-4f07-bf40-28d66c2aacfc service nova] Acquiring lock "158fe346-93f5-422b-877a-8423547da58f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1547.832794] env[63379]: DEBUG oslo_concurrency.lockutils [req-0c439511-073e-47dd-9b1b-c4cd6ce9fbdd req-5d544c99-dc96-4f07-bf40-28d66c2aacfc service nova] Lock "158fe346-93f5-422b-877a-8423547da58f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1547.833062] env[63379]: DEBUG oslo_concurrency.lockutils [req-0c439511-073e-47dd-9b1b-c4cd6ce9fbdd req-5d544c99-dc96-4f07-bf40-28d66c2aacfc service nova] Lock "158fe346-93f5-422b-877a-8423547da58f-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1547.833211] env[63379]: DEBUG nova.compute.manager [req-0c439511-073e-47dd-9b1b-c4cd6ce9fbdd req-5d544c99-dc96-4f07-bf40-28d66c2aacfc service nova] [instance: 158fe346-93f5-422b-877a-8423547da58f] No waiting events found dispatching network-vif-plugged-ce8dbca6-e4fa-47a3-b501-18973a50219c {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1547.833376] env[63379]: WARNING nova.compute.manager [req-0c439511-073e-47dd-9b1b-c4cd6ce9fbdd req-5d544c99-dc96-4f07-bf40-28d66c2aacfc service nova] [instance: 158fe346-93f5-422b-877a-8423547da58f] Received unexpected event network-vif-plugged-ce8dbca6-e4fa-47a3-b501-18973a50219c for instance with vm_state building and task_state spawning. [ 1547.833537] env[63379]: DEBUG nova.compute.manager [req-0c439511-073e-47dd-9b1b-c4cd6ce9fbdd req-5d544c99-dc96-4f07-bf40-28d66c2aacfc service nova] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Received event network-changed-01134024-43f6-41eb-b222-1e69cef1bfd4 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1547.833691] env[63379]: DEBUG nova.compute.manager [req-0c439511-073e-47dd-9b1b-c4cd6ce9fbdd req-5d544c99-dc96-4f07-bf40-28d66c2aacfc service nova] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Refreshing instance network info cache due to event network-changed-01134024-43f6-41eb-b222-1e69cef1bfd4. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1547.834101] env[63379]: DEBUG oslo_concurrency.lockutils [req-0c439511-073e-47dd-9b1b-c4cd6ce9fbdd req-5d544c99-dc96-4f07-bf40-28d66c2aacfc service nova] Acquiring lock "refresh_cache-915aec20-5765-4aad-8b0f-f2d71b7d6428" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1547.834259] env[63379]: DEBUG oslo_concurrency.lockutils [req-0c439511-073e-47dd-9b1b-c4cd6ce9fbdd req-5d544c99-dc96-4f07-bf40-28d66c2aacfc service nova] Acquired lock "refresh_cache-915aec20-5765-4aad-8b0f-f2d71b7d6428" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1547.834422] env[63379]: DEBUG nova.network.neutron [req-0c439511-073e-47dd-9b1b-c4cd6ce9fbdd req-5d544c99-dc96-4f07-bf40-28d66c2aacfc service nova] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Refreshing network info cache for port 01134024-43f6-41eb-b222-1e69cef1bfd4 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1547.844094] env[63379]: DEBUG nova.compute.manager [None req-aa947b5a-81ea-4a4c-92cd-ea50bfb162ea tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1547.846145] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68f515ea-92c1-4056-8a18-c6eaff9f33f3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.917653] env[63379]: DEBUG nova.compute.manager [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Start spawning the instance on 
the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1547.920049] env[63379]: DEBUG oslo_vmware.api [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5234b7b7-3cac-5886-6c0e-55d06ab31c52, 'name': SearchDatastore_Task, 'duration_secs': 0.02307} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.920792] env[63379]: DEBUG oslo_concurrency.lockutils [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1547.921885] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 5aa36799-251b-4933-8ccd-8125995b1f8b/5aa36799-251b-4933-8ccd-8125995b1f8b.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1547.921994] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-100c3f65-5eec-4069-a1ec-a9ef367c6b79 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.934429] env[63379]: DEBUG oslo_vmware.api [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Waiting for the task: (returnval){ [ 1547.934429] env[63379]: value = "task-1779367" [ 1547.934429] env[63379]: _type = "Task" [ 1547.934429] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.948762] env[63379]: DEBUG oslo_vmware.api [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Task: {'id': task-1779367, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.955797] env[63379]: DEBUG nova.virt.hardware [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1547.956162] env[63379]: DEBUG nova.virt.hardware [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1547.956262] env[63379]: DEBUG nova.virt.hardware [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1547.956477] env[63379]: DEBUG nova.virt.hardware [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1547.956759] env[63379]: DEBUG nova.virt.hardware [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1547.956759] env[63379]: DEBUG nova.virt.hardware [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1547.956976] env[63379]: DEBUG nova.virt.hardware [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1547.957493] env[63379]: DEBUG nova.virt.hardware [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1547.957689] env[63379]: DEBUG nova.virt.hardware [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 
tempest-ImagesTestJSON-516714316-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1547.957866] env[63379]: DEBUG nova.virt.hardware [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1547.958060] env[63379]: DEBUG nova.virt.hardware [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1547.959160] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bec17a79-e91a-46c4-9d5b-ed869b2ac33e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.968538] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4607c96-4781-4ecf-a45a-21360d3930ef {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.077213] env[63379]: DEBUG nova.scheduler.client.report [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1548.094963] env[63379]: DEBUG oslo_concurrency.lockutils [req-dca2a9c7-1b44-465f-83a0-1b36f24637f2 req-8b3a6dc8-526b-41cd-9560-ada69c4e7caf service nova] Releasing lock "refresh_cache-5aa36799-251b-4933-8ccd-8125995b1f8b" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1548.094963] env[63379]: DEBUG nova.compute.manager [req-dca2a9c7-1b44-465f-83a0-1b36f24637f2 req-8b3a6dc8-526b-41cd-9560-ada69c4e7caf service nova] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Received event network-vif-deleted-3c22cde0-746e-43ec-b075-e14c004043c4 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1548.240888] env[63379]: DEBUG oslo_concurrency.lockutils [None req-53ff7a10-5495-4cde-8110-cc0faccef8c7 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Acquiring lock "318355e9-b4cc-4645-ac51-b583d14e1134" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1548.252318] env[63379]: DEBUG nova.compute.manager [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: 
38be0e8d-188b-4a98-aedc-5d941b63c000] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1548.269307] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Releasing lock "refresh_cache-158fe346-93f5-422b-877a-8423547da58f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1548.269307] env[63379]: DEBUG nova.compute.manager [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] [instance: 158fe346-93f5-422b-877a-8423547da58f] Instance network_info: |[{"id": "ce8dbca6-e4fa-47a3-b501-18973a50219c", "address": "fa:16:3e:db:bf:0c", "network": {"id": "ed24d0da-9874-4940-bc5e-1aa29c68ce84", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-2101419788-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "692581dc5dda4b3b94565dadcd06ec38", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce8dbca6-e4", "ovs_interfaceid": "ce8dbca6-e4fa-47a3-b501-18973a50219c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1548.269307] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] [instance: 158fe346-93f5-422b-877a-8423547da58f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:db:bf:0c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f77ff7a1-209c-4f3f-b2a0-fd817741e739', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ce8dbca6-e4fa-47a3-b501-18973a50219c', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1548.277358] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Creating folder: Project (692581dc5dda4b3b94565dadcd06ec38). Parent ref: group-v369214. 
{{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1548.277593] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e76db0bc-01ba-4eb5-94ce-84f2b621a4c9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.294581] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Created folder: Project (692581dc5dda4b3b94565dadcd06ec38) in parent group-v369214. [ 1548.294581] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Creating folder: Instances. Parent ref: group-v369351. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1548.294581] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bcb8be01-29d5-4c2a-8eb9-13374e5a6617 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.306066] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Created folder: Instances in parent group-v369351. [ 1548.309667] env[63379]: DEBUG oslo.service.loopingcall [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1548.311064] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 158fe346-93f5-422b-877a-8423547da58f] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1548.313872] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0f682427-4c4e-4d0d-b7a3-46efd0755529 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.330946] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "607f9774-0ffc-4ece-a7ba-419fdf6eb26b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1548.331208] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "607f9774-0ffc-4ece-a7ba-419fdf6eb26b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1548.339212] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1548.339212] env[63379]: value = "task-1779370" [ 1548.339212] env[63379]: _type = "Task" [ 1548.339212] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.348137] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779370, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.360486] env[63379]: INFO nova.compute.manager [None req-aa947b5a-81ea-4a4c-92cd-ea50bfb162ea tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] instance snapshotting [ 1548.365573] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2328091-9dc6-42e3-94a6-22d5546ad618 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.390312] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ed2a3ba-219f-42ed-9de7-52dfd54ccf27 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.448210] env[63379]: DEBUG oslo_vmware.api [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Task: {'id': task-1779367, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.524121] env[63379]: DEBUG oslo_concurrency.lockutils [None req-edcf2cbc-93f9-4d7e-b9b0-2fe9bf72c175 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Acquiring lock "650d4709-3cbc-4b9a-b165-66fa0af97c4d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1548.524383] env[63379]: DEBUG oslo_concurrency.lockutils [None req-edcf2cbc-93f9-4d7e-b9b0-2fe9bf72c175 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Lock "650d4709-3cbc-4b9a-b165-66fa0af97c4d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1548.524473] env[63379]: DEBUG oslo_concurrency.lockutils [None req-edcf2cbc-93f9-4d7e-b9b0-2fe9bf72c175 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Acquiring lock "650d4709-3cbc-4b9a-b165-66fa0af97c4d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1548.524760] env[63379]: DEBUG oslo_concurrency.lockutils [None req-edcf2cbc-93f9-4d7e-b9b0-2fe9bf72c175 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Lock "650d4709-3cbc-4b9a-b165-66fa0af97c4d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1548.524884] env[63379]: DEBUG oslo_concurrency.lockutils [None req-edcf2cbc-93f9-4d7e-b9b0-2fe9bf72c175 tempest-ImagesNegativeTestJSON-1319031571 
tempest-ImagesNegativeTestJSON-1319031571-project-member] Lock "650d4709-3cbc-4b9a-b165-66fa0af97c4d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1548.531196] env[63379]: INFO nova.compute.manager [None req-edcf2cbc-93f9-4d7e-b9b0-2fe9bf72c175 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Terminating instance [ 1548.533303] env[63379]: DEBUG nova.compute.manager [None req-edcf2cbc-93f9-4d7e-b9b0-2fe9bf72c175 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1548.533517] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-edcf2cbc-93f9-4d7e-b9b0-2fe9bf72c175 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1548.534488] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e869cd2-36da-4b7a-9178-998d86d2dce5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.545214] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-edcf2cbc-93f9-4d7e-b9b0-2fe9bf72c175 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1548.545507] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ee410257-4c06-40b7-9f3b-d3ed4d0f1959 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.553838] env[63379]: DEBUG oslo_vmware.api [None req-edcf2cbc-93f9-4d7e-b9b0-2fe9bf72c175 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Waiting for the task: (returnval){ [ 1548.553838] env[63379]: value = "task-1779371" [ 1548.553838] env[63379]: _type = "Task" [ 1548.553838] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.563535] env[63379]: DEBUG oslo_vmware.api [None req-edcf2cbc-93f9-4d7e-b9b0-2fe9bf72c175 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Task: {'id': task-1779371, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.584239] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.703s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1548.584759] env[63379]: DEBUG nova.compute.manager [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1548.587513] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9aba89fe-325c-4517-96cf-d2c002d94892 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.753s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1548.587714] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9aba89fe-325c-4517-96cf-d2c002d94892 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1548.589776] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.152s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1548.591265] env[63379]: INFO nova.compute.claims [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1548.636391] env[63379]: INFO nova.scheduler.client.report [None req-9aba89fe-325c-4517-96cf-d2c002d94892 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Deleted allocations for instance bf0dd3cf-684c-4378-a89c-5b9f16df062d [ 1548.764591] env[63379]: DEBUG nova.compute.manager [req-9ed27d2e-0e2d-4fc9-9cb7-b6f8621dc3b3 req-27292e0c-ed02-4153-88b9-3e3aae188c09 service nova] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Received event network-vif-plugged-fee236c0-9eaa-44e8-b51c-e97f6f003dad {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1548.764844] env[63379]: DEBUG oslo_concurrency.lockutils [req-9ed27d2e-0e2d-4fc9-9cb7-b6f8621dc3b3 req-27292e0c-ed02-4153-88b9-3e3aae188c09 service nova] Acquiring lock "f10fe64d-a09e-488a-b609-3e38922cf2e0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" 
{{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1548.765142] env[63379]: DEBUG oslo_concurrency.lockutils [req-9ed27d2e-0e2d-4fc9-9cb7-b6f8621dc3b3 req-27292e0c-ed02-4153-88b9-3e3aae188c09 service nova] Lock "f10fe64d-a09e-488a-b609-3e38922cf2e0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1548.765277] env[63379]: DEBUG oslo_concurrency.lockutils [req-9ed27d2e-0e2d-4fc9-9cb7-b6f8621dc3b3 req-27292e0c-ed02-4153-88b9-3e3aae188c09 service nova] Lock "f10fe64d-a09e-488a-b609-3e38922cf2e0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1548.765442] env[63379]: DEBUG nova.compute.manager [req-9ed27d2e-0e2d-4fc9-9cb7-b6f8621dc3b3 req-27292e0c-ed02-4153-88b9-3e3aae188c09 service nova] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] No waiting events found dispatching network-vif-plugged-fee236c0-9eaa-44e8-b51c-e97f6f003dad {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1548.765611] env[63379]: WARNING nova.compute.manager [req-9ed27d2e-0e2d-4fc9-9cb7-b6f8621dc3b3 req-27292e0c-ed02-4153-88b9-3e3aae188c09 service nova] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Received unexpected event network-vif-plugged-fee236c0-9eaa-44e8-b51c-e97f6f003dad for instance with vm_state building and task_state spawning. [ 1548.775092] env[63379]: DEBUG oslo_concurrency.lockutils [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1548.787789] env[63379]: DEBUG nova.network.neutron [req-0c439511-073e-47dd-9b1b-c4cd6ce9fbdd req-5d544c99-dc96-4f07-bf40-28d66c2aacfc service nova] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Updated VIF entry in instance network info cache for port 01134024-43f6-41eb-b222-1e69cef1bfd4. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1548.788339] env[63379]: DEBUG nova.network.neutron [req-0c439511-073e-47dd-9b1b-c4cd6ce9fbdd req-5d544c99-dc96-4f07-bf40-28d66c2aacfc service nova] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Updating instance_info_cache with network_info: [{"id": "01134024-43f6-41eb-b222-1e69cef1bfd4", "address": "fa:16:3e:25:e3:d2", "network": {"id": "ddbc3cba-6a78-4455-89dd-2b790241675e", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1612069245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e17ea72d033544159bbaea7365a7f221", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "76e60ff4-204c-4f48-bd0e-2d5fa0a812ef", "external-id": "nsx-vlan-transportzone-854", "segmentation_id": 854, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01134024-43", "ovs_interfaceid": "01134024-43f6-41eb-b222-1e69cef1bfd4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1548.851584] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779370, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.906377] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-aa947b5a-81ea-4a4c-92cd-ea50bfb162ea tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Creating Snapshot of the VM instance {{(pid=63379) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1548.906377] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-e02f41c8-3a2b-4eec-9040-634975f2dfc1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.914877] env[63379]: DEBUG oslo_vmware.api [None req-aa947b5a-81ea-4a4c-92cd-ea50bfb162ea tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Waiting for the task: (returnval){ [ 1548.914877] env[63379]: value = "task-1779372" [ 1548.914877] env[63379]: _type = "Task" [ 1548.914877] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.924990] env[63379]: DEBUG oslo_vmware.api [None req-aa947b5a-81ea-4a4c-92cd-ea50bfb162ea tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779372, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.925931] env[63379]: DEBUG nova.network.neutron [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Successfully updated port: fee236c0-9eaa-44e8-b51c-e97f6f003dad {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1548.946059] env[63379]: DEBUG oslo_vmware.api [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Task: {'id': task-1779367, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.805491} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.946262] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 5aa36799-251b-4933-8ccd-8125995b1f8b/5aa36799-251b-4933-8ccd-8125995b1f8b.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1548.946479] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1548.946735] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-931b1f75-ec3b-4b86-b695-9d746b7bbf68 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.954621] env[63379]: DEBUG oslo_vmware.api [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Waiting for the task: (returnval){ [ 1548.954621] env[63379]: value = "task-1779373" [ 1548.954621] env[63379]: _type = "Task" [ 1548.954621] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.969909] env[63379]: DEBUG oslo_vmware.api [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Task: {'id': task-1779373, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.064422] env[63379]: DEBUG oslo_vmware.api [None req-edcf2cbc-93f9-4d7e-b9b0-2fe9bf72c175 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Task: {'id': task-1779371, 'name': PowerOffVM_Task, 'duration_secs': 0.343991} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.064722] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-edcf2cbc-93f9-4d7e-b9b0-2fe9bf72c175 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1549.064897] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-edcf2cbc-93f9-4d7e-b9b0-2fe9bf72c175 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1549.065211] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e257d239-e2a1-4db2-b02d-80670029aa77 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.096165] env[63379]: DEBUG nova.compute.utils [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1549.099613] env[63379]: DEBUG nova.compute.manager [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1549.099784] env[63379]: DEBUG nova.network.neutron [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1549.145193] env[63379]: DEBUG nova.policy [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '87416db304754ced85d79d6d30ca2241', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ce15a519ec5744feb0731439b2534fc0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1549.147336] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9aba89fe-325c-4517-96cf-d2c002d94892 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Lock "bf0dd3cf-684c-4378-a89c-5b9f16df062d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.955s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1549.156401] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-edcf2cbc-93f9-4d7e-b9b0-2fe9bf72c175 tempest-ImagesNegativeTestJSON-1319031571 
tempest-ImagesNegativeTestJSON-1319031571-project-member] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1549.156807] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-edcf2cbc-93f9-4d7e-b9b0-2fe9bf72c175 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1549.156895] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-edcf2cbc-93f9-4d7e-b9b0-2fe9bf72c175 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Deleting the datastore file [datastore1] 650d4709-3cbc-4b9a-b165-66fa0af97c4d {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1549.157233] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-655efa20-317e-42af-8ee8-98a006bb23b7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.165891] env[63379]: DEBUG oslo_vmware.api [None req-edcf2cbc-93f9-4d7e-b9b0-2fe9bf72c175 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Waiting for the task: (returnval){ [ 1549.165891] env[63379]: value = "task-1779375" [ 1549.165891] env[63379]: _type = "Task" [ 1549.165891] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.175572] env[63379]: DEBUG oslo_vmware.api [None req-edcf2cbc-93f9-4d7e-b9b0-2fe9bf72c175 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Task: {'id': task-1779375, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.290919] env[63379]: DEBUG oslo_concurrency.lockutils [req-0c439511-073e-47dd-9b1b-c4cd6ce9fbdd req-5d544c99-dc96-4f07-bf40-28d66c2aacfc service nova] Releasing lock "refresh_cache-915aec20-5765-4aad-8b0f-f2d71b7d6428" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1549.291238] env[63379]: DEBUG nova.compute.manager [req-0c439511-073e-47dd-9b1b-c4cd6ce9fbdd req-5d544c99-dc96-4f07-bf40-28d66c2aacfc service nova] [instance: 158fe346-93f5-422b-877a-8423547da58f] Received event network-changed-ce8dbca6-e4fa-47a3-b501-18973a50219c {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1549.291422] env[63379]: DEBUG nova.compute.manager [req-0c439511-073e-47dd-9b1b-c4cd6ce9fbdd req-5d544c99-dc96-4f07-bf40-28d66c2aacfc service nova] [instance: 158fe346-93f5-422b-877a-8423547da58f] Refreshing instance network info cache due to event network-changed-ce8dbca6-e4fa-47a3-b501-18973a50219c. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1549.291646] env[63379]: DEBUG oslo_concurrency.lockutils [req-0c439511-073e-47dd-9b1b-c4cd6ce9fbdd req-5d544c99-dc96-4f07-bf40-28d66c2aacfc service nova] Acquiring lock "refresh_cache-158fe346-93f5-422b-877a-8423547da58f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1549.291794] env[63379]: DEBUG oslo_concurrency.lockutils [req-0c439511-073e-47dd-9b1b-c4cd6ce9fbdd req-5d544c99-dc96-4f07-bf40-28d66c2aacfc service nova] Acquired lock "refresh_cache-158fe346-93f5-422b-877a-8423547da58f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1549.292135] env[63379]: DEBUG nova.network.neutron [req-0c439511-073e-47dd-9b1b-c4cd6ce9fbdd req-5d544c99-dc96-4f07-bf40-28d66c2aacfc service nova] [instance: 158fe346-93f5-422b-877a-8423547da58f] Refreshing network info cache for port ce8dbca6-e4fa-47a3-b501-18973a50219c {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1549.350683] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779370, 'name': CreateVM_Task, 'duration_secs': 0.675339} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.350862] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 158fe346-93f5-422b-877a-8423547da58f] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1549.351603] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1549.351789] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1549.352151] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1549.352420] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70d0b407-c7ed-4ca8-a141-ae2d008fd272 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.359979] env[63379]: DEBUG oslo_vmware.api [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Waiting for the task: (returnval){ [ 1549.359979] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]528bd7ce-b35b-160b-240b-0742d82178d1" [ 1549.359979] env[63379]: _type = "Task" [ 1549.359979] env[63379]: } to 
complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.368614] env[63379]: DEBUG oslo_vmware.api [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]528bd7ce-b35b-160b-240b-0742d82178d1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.424818] env[63379]: DEBUG oslo_vmware.api [None req-aa947b5a-81ea-4a4c-92cd-ea50bfb162ea tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779372, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.428717] env[63379]: DEBUG oslo_concurrency.lockutils [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquiring lock "refresh_cache-f10fe64d-a09e-488a-b609-3e38922cf2e0" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1549.428891] env[63379]: DEBUG oslo_concurrency.lockutils [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquired lock "refresh_cache-f10fe64d-a09e-488a-b609-3e38922cf2e0" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1549.429073] env[63379]: DEBUG nova.network.neutron [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1549.465086] env[63379]: DEBUG oslo_vmware.api [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Task: {'id': task-1779373, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067708} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.465403] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1549.466248] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88886ce6-77c7-4a87-9816-8fdc2eb7b5da {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.492618] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Reconfiguring VM instance instance-0000002e to attach disk [datastore1] 5aa36799-251b-4933-8ccd-8125995b1f8b/5aa36799-251b-4933-8ccd-8125995b1f8b.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1549.493552] env[63379]: DEBUG nova.network.neutron [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Successfully created port: 8e6b3d77-6a88-493c-9ef0-bae55a6dbbc3 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1549.495508] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-874014f7-c68f-47b7-b770-8db9b10f6a3b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.516861] env[63379]: DEBUG oslo_vmware.api [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Waiting for the task: (returnval){ [ 1549.516861] env[63379]: value = "task-1779376" [ 1549.516861] env[63379]: _type = "Task" [ 1549.516861] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.525613] env[63379]: DEBUG oslo_vmware.api [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Task: {'id': task-1779376, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.607305] env[63379]: DEBUG nova.compute.manager [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1549.680067] env[63379]: DEBUG oslo_vmware.api [None req-edcf2cbc-93f9-4d7e-b9b0-2fe9bf72c175 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Task: {'id': task-1779375, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.198507} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.680345] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-edcf2cbc-93f9-4d7e-b9b0-2fe9bf72c175 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1549.680532] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-edcf2cbc-93f9-4d7e-b9b0-2fe9bf72c175 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1549.680711] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-edcf2cbc-93f9-4d7e-b9b0-2fe9bf72c175 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1549.680883] env[63379]: INFO nova.compute.manager [None req-edcf2cbc-93f9-4d7e-b9b0-2fe9bf72c175 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1549.681151] env[63379]: DEBUG oslo.service.loopingcall [None req-edcf2cbc-93f9-4d7e-b9b0-2fe9bf72c175 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1549.681351] env[63379]: DEBUG nova.compute.manager [-] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1549.681441] env[63379]: DEBUG nova.network.neutron [-] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1549.880666] env[63379]: DEBUG oslo_vmware.api [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]528bd7ce-b35b-160b-240b-0742d82178d1, 'name': SearchDatastore_Task, 'duration_secs': 0.015411} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.880666] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1549.881185] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] [instance: 158fe346-93f5-422b-877a-8423547da58f] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1549.881242] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1549.881531] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1549.881617] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1549.881834] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dcea8d60-3752-4cdb-b42a-9edb4c59ffd9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.894046] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1549.894255] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1549.895702] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-548a1e4a-6dd6-4011-8a8c-89dccfb2e81c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.907768] env[63379]: DEBUG oslo_vmware.api [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Waiting for the task: (returnval){ [ 1549.907768] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]526fef29-2d25-3d1f-beb6-c66120229281" [ 1549.907768] env[63379]: _type = "Task" [ 1549.907768] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.915427] env[63379]: DEBUG oslo_vmware.api [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]526fef29-2d25-3d1f-beb6-c66120229281, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.929018] env[63379]: DEBUG oslo_vmware.api [None req-aa947b5a-81ea-4a4c-92cd-ea50bfb162ea tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779372, 'name': CreateSnapshot_Task, 'duration_secs': 0.523652} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.929351] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-aa947b5a-81ea-4a4c-92cd-ea50bfb162ea tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Created Snapshot of the VM instance {{(pid=63379) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1549.930183] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8904c7c5-5bb7-44c5-92f2-200b797c0e59 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.018866] env[63379]: DEBUG nova.network.neutron [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1550.034613] env[63379]: DEBUG oslo_vmware.api [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Task: {'id': task-1779376, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.195239] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7650106-9d65-4022-9e94-e2d7eb30aec2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.206929] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-193fadc8-736e-46b8-88e6-9a83c6b2e707 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.254702] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74ea3eec-b8fd-41f0-bdd0-34896cf09f05 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.267384] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b8d3349-86f3-4b55-9961-8ba5a6f3d2b2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.287023] env[63379]: DEBUG nova.compute.provider_tree [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1550.318471] env[63379]: DEBUG nova.network.neutron [req-0c439511-073e-47dd-9b1b-c4cd6ce9fbdd req-5d544c99-dc96-4f07-bf40-28d66c2aacfc service nova] [instance: 158fe346-93f5-422b-877a-8423547da58f] Updated VIF entry in instance network info cache for port ce8dbca6-e4fa-47a3-b501-18973a50219c. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1550.318825] env[63379]: DEBUG nova.network.neutron [req-0c439511-073e-47dd-9b1b-c4cd6ce9fbdd req-5d544c99-dc96-4f07-bf40-28d66c2aacfc service nova] [instance: 158fe346-93f5-422b-877a-8423547da58f] Updating instance_info_cache with network_info: [{"id": "ce8dbca6-e4fa-47a3-b501-18973a50219c", "address": "fa:16:3e:db:bf:0c", "network": {"id": "ed24d0da-9874-4940-bc5e-1aa29c68ce84", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-2101419788-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "692581dc5dda4b3b94565dadcd06ec38", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce8dbca6-e4", "ovs_interfaceid": "ce8dbca6-e4fa-47a3-b501-18973a50219c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1550.329795] env[63379]: DEBUG nova.network.neutron [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Updating instance_info_cache with network_info: [{"id": "fee236c0-9eaa-44e8-b51c-e97f6f003dad", "address": "fa:16:3e:62:fd:6b", "network": {"id": "0f1c71c4-9a40-4d5f-9ce7-b2e38109b1f5", "bridge": "br-int", "label": "tempest-ImagesTestJSON-969152574-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "767980ba969142098ccbdf031f6e62a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0636c3f6-fcb7-4954-ab07-c5cd0dee37b0", "external-id": "nsx-vlan-transportzone-857", "segmentation_id": 857, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfee236c0-9e", "ovs_interfaceid": "fee236c0-9eaa-44e8-b51c-e97f6f003dad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1550.416162] env[63379]: DEBUG oslo_vmware.api [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]526fef29-2d25-3d1f-beb6-c66120229281, 'name': SearchDatastore_Task, 'duration_secs': 0.028234} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1550.417075] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2be0ee0-c2cc-4f05-aafc-0235abe24f96 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.422926] env[63379]: DEBUG oslo_vmware.api [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Waiting for the task: (returnval){ [ 1550.422926] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f2cb6d-4092-eb39-bca5-e37f51520c22" [ 1550.422926] env[63379]: _type = "Task" [ 1550.422926] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.432774] env[63379]: DEBUG oslo_vmware.api [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f2cb6d-4092-eb39-bca5-e37f51520c22, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.458585] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-aa947b5a-81ea-4a4c-92cd-ea50bfb162ea tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Creating linked-clone VM from snapshot {{(pid=63379) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1550.458585] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-cce05150-87a4-4864-b357-916cbe42108b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.470554] env[63379]: DEBUG oslo_vmware.api [None req-aa947b5a-81ea-4a4c-92cd-ea50bfb162ea tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Waiting for the task: (returnval){ [ 1550.470554] env[63379]: value = "task-1779377" [ 1550.470554] env[63379]: _type = "Task" [ 1550.470554] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.485842] env[63379]: DEBUG oslo_vmware.api [None req-aa947b5a-81ea-4a4c-92cd-ea50bfb162ea tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779377, 'name': CloneVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.531504] env[63379]: DEBUG oslo_vmware.api [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Task: {'id': task-1779376, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.616760] env[63379]: DEBUG nova.compute.manager [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1550.640246] env[63379]: DEBUG nova.virt.hardware [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1550.640432] env[63379]: DEBUG nova.virt.hardware [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1550.640677] env[63379]: DEBUG nova.virt.hardware [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1550.640925] env[63379]: DEBUG nova.virt.hardware [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1550.641171] env[63379]: DEBUG nova.virt.hardware [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1550.641395] env[63379]: DEBUG nova.virt.hardware [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1550.641676] env[63379]: DEBUG nova.virt.hardware [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1550.641937] env[63379]: DEBUG nova.virt.hardware [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1550.642221] env[63379]: DEBUG nova.virt.hardware [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1550.642477] env[63379]: DEBUG nova.virt.hardware [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1550.642705] env[63379]: DEBUG nova.virt.hardware [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1550.643666] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ecde8c2-f6da-4aba-8506-15f95e6437dd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.653502] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a25d42e-8974-4f02-93e5-4eca2c9b2db8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.791709] env[63379]: DEBUG nova.scheduler.client.report [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1550.796403] env[63379]: DEBUG nova.network.neutron [-] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1550.798962] env[63379]: DEBUG nova.compute.manager [req-14748838-32e1-408a-9017-9286eb4c0ea1 req-0c238e3f-d1e5-4896-b875-0169ec96b86d service nova] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Received event network-changed-fee236c0-9eaa-44e8-b51c-e97f6f003dad {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1550.799747] env[63379]: DEBUG nova.compute.manager [req-14748838-32e1-408a-9017-9286eb4c0ea1 req-0c238e3f-d1e5-4896-b875-0169ec96b86d service nova] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Refreshing instance network info cache due to event network-changed-fee236c0-9eaa-44e8-b51c-e97f6f003dad. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1550.799747] env[63379]: DEBUG oslo_concurrency.lockutils [req-14748838-32e1-408a-9017-9286eb4c0ea1 req-0c238e3f-d1e5-4896-b875-0169ec96b86d service nova] Acquiring lock "refresh_cache-f10fe64d-a09e-488a-b609-3e38922cf2e0" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1550.821964] env[63379]: DEBUG oslo_concurrency.lockutils [req-0c439511-073e-47dd-9b1b-c4cd6ce9fbdd req-5d544c99-dc96-4f07-bf40-28d66c2aacfc service nova] Releasing lock "refresh_cache-158fe346-93f5-422b-877a-8423547da58f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1550.834027] env[63379]: DEBUG oslo_concurrency.lockutils [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Releasing lock "refresh_cache-f10fe64d-a09e-488a-b609-3e38922cf2e0" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1550.834409] env[63379]: DEBUG nova.compute.manager [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Instance network_info: |[{"id": "fee236c0-9eaa-44e8-b51c-e97f6f003dad", "address": "fa:16:3e:62:fd:6b", "network": {"id": "0f1c71c4-9a40-4d5f-9ce7-b2e38109b1f5", "bridge": "br-int", "label": "tempest-ImagesTestJSON-969152574-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "767980ba969142098ccbdf031f6e62a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0636c3f6-fcb7-4954-ab07-c5cd0dee37b0", "external-id": "nsx-vlan-transportzone-857", "segmentation_id": 857, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfee236c0-9e", "ovs_interfaceid": "fee236c0-9eaa-44e8-b51c-e97f6f003dad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1550.834795] env[63379]: DEBUG oslo_concurrency.lockutils [req-14748838-32e1-408a-9017-9286eb4c0ea1 req-0c238e3f-d1e5-4896-b875-0169ec96b86d service nova] Acquired lock "refresh_cache-f10fe64d-a09e-488a-b609-3e38922cf2e0" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1550.835065] env[63379]: DEBUG nova.network.neutron [req-14748838-32e1-408a-9017-9286eb4c0ea1 req-0c238e3f-d1e5-4896-b875-0169ec96b86d service nova] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Refreshing network info cache for port fee236c0-9eaa-44e8-b51c-e97f6f003dad {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1550.836521] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] 
Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:62:fd:6b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0636c3f6-fcb7-4954-ab07-c5cd0dee37b0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fee236c0-9eaa-44e8-b51c-e97f6f003dad', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1550.850317] env[63379]: DEBUG oslo.service.loopingcall [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1550.854386] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1550.855141] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d1a039ae-2c22-4228-9faa-f732a76c74a5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.880950] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1550.880950] env[63379]: value = "task-1779378" [ 1550.880950] env[63379]: _type = "Task" [ 1550.880950] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.892922] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779378, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.934527] env[63379]: DEBUG oslo_vmware.api [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f2cb6d-4092-eb39-bca5-e37f51520c22, 'name': SearchDatastore_Task, 'duration_secs': 0.014488} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1550.934879] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1550.935230] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 158fe346-93f5-422b-877a-8423547da58f/158fe346-93f5-422b-877a-8423547da58f.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1550.935546] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9ea89e42-af0c-4b1a-8b84-05cb7ca00da8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.944797] env[63379]: DEBUG oslo_vmware.api [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Waiting for the task: (returnval){ [ 1550.944797] env[63379]: value = "task-1779379" [ 1550.944797] env[63379]: _type = "Task" [ 1550.944797] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.955082] env[63379]: DEBUG oslo_vmware.api [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Task: {'id': task-1779379, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.981061] env[63379]: DEBUG oslo_vmware.api [None req-aa947b5a-81ea-4a4c-92cd-ea50bfb162ea tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779377, 'name': CloneVM_Task} progress is 94%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.034146] env[63379]: DEBUG oslo_vmware.api [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Task: {'id': task-1779376, 'name': ReconfigVM_Task, 'duration_secs': 1.026436} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.037367] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Reconfigured VM instance instance-0000002e to attach disk [datastore1] 5aa36799-251b-4933-8ccd-8125995b1f8b/5aa36799-251b-4933-8ccd-8125995b1f8b.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1551.038671] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-113deee4-969e-4a02-9acb-242f2c16aeaa {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.046801] env[63379]: DEBUG oslo_vmware.api [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Waiting for the task: (returnval){ [ 1551.046801] env[63379]: value = "task-1779380" [ 1551.046801] env[63379]: _type = "Task" [ 1551.046801] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.060332] env[63379]: DEBUG oslo_vmware.api [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Task: {'id': task-1779380, 'name': Rename_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.303728] env[63379]: INFO nova.compute.manager [-] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Took 1.62 seconds to deallocate network for instance. [ 1551.304645] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.715s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1551.305214] env[63379]: DEBUG nova.compute.manager [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1551.311250] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.342s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1551.315412] env[63379]: INFO nova.compute.claims [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1551.325384] env[63379]: DEBUG nova.network.neutron [req-14748838-32e1-408a-9017-9286eb4c0ea1 req-0c238e3f-d1e5-4896-b875-0169ec96b86d service nova] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Updated VIF entry in instance network info cache for port fee236c0-9eaa-44e8-b51c-e97f6f003dad. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1551.325384] env[63379]: DEBUG nova.network.neutron [req-14748838-32e1-408a-9017-9286eb4c0ea1 req-0c238e3f-d1e5-4896-b875-0169ec96b86d service nova] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Updating instance_info_cache with network_info: [{"id": "fee236c0-9eaa-44e8-b51c-e97f6f003dad", "address": "fa:16:3e:62:fd:6b", "network": {"id": "0f1c71c4-9a40-4d5f-9ce7-b2e38109b1f5", "bridge": "br-int", "label": "tempest-ImagesTestJSON-969152574-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "767980ba969142098ccbdf031f6e62a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0636c3f6-fcb7-4954-ab07-c5cd0dee37b0", "external-id": "nsx-vlan-transportzone-857", "segmentation_id": 857, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfee236c0-9e", "ovs_interfaceid": "fee236c0-9eaa-44e8-b51c-e97f6f003dad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1551.396408] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779378, 'name': CreateVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.454969] env[63379]: DEBUG oslo_vmware.api [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Task: {'id': task-1779379, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.481613] env[63379]: DEBUG oslo_vmware.api [None req-aa947b5a-81ea-4a4c-92cd-ea50bfb162ea tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779377, 'name': CloneVM_Task} progress is 94%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.557013] env[63379]: DEBUG oslo_vmware.api [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Task: {'id': task-1779380, 'name': Rename_Task, 'duration_secs': 0.173999} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.557390] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1551.557670] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-71fdc9ac-51cd-4d3f-8a0a-7e4dc289d01f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.565645] env[63379]: DEBUG oslo_vmware.api [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Waiting for the task: (returnval){ [ 1551.565645] env[63379]: value = "task-1779381" [ 1551.565645] env[63379]: _type = "Task" [ 1551.565645] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.574782] env[63379]: DEBUG oslo_vmware.api [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Task: {'id': task-1779381, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.609092] env[63379]: DEBUG nova.network.neutron [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Successfully updated port: 8e6b3d77-6a88-493c-9ef0-bae55a6dbbc3 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1551.820035] env[63379]: DEBUG nova.compute.utils [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1551.821916] env[63379]: DEBUG nova.compute.manager [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1551.822045] env[63379]: DEBUG nova.network.neutron [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1551.826464] env[63379]: DEBUG oslo_concurrency.lockutils [None req-edcf2cbc-93f9-4d7e-b9b0-2fe9bf72c175 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1551.828787] env[63379]: DEBUG oslo_concurrency.lockutils [req-14748838-32e1-408a-9017-9286eb4c0ea1 req-0c238e3f-d1e5-4896-b875-0169ec96b86d service nova] Releasing lock "refresh_cache-f10fe64d-a09e-488a-b609-3e38922cf2e0" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1551.829050] env[63379]: DEBUG nova.compute.manager [req-14748838-32e1-408a-9017-9286eb4c0ea1 req-0c238e3f-d1e5-4896-b875-0169ec96b86d service nova] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Received event network-vif-deleted-6af296da-afd1-49bb-b790-5fb012d68a2c {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1551.829286] env[63379]: INFO nova.compute.manager [req-14748838-32e1-408a-9017-9286eb4c0ea1 req-0c238e3f-d1e5-4896-b875-0169ec96b86d service nova] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Neutron deleted interface 6af296da-afd1-49bb-b790-5fb012d68a2c; detaching it from the instance and deleting it from the info cache [ 1551.829434] env[63379]: DEBUG nova.network.neutron [req-14748838-32e1-408a-9017-9286eb4c0ea1 req-0c238e3f-d1e5-4896-b875-0169ec96b86d service nova] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1551.896509] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779378, 'name': CreateVM_Task, 'duration_secs': 0.737489} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.896509] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1551.897082] env[63379]: DEBUG oslo_concurrency.lockutils [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1551.897267] env[63379]: DEBUG oslo_concurrency.lockutils [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1551.897623] env[63379]: DEBUG oslo_concurrency.lockutils [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1551.897885] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55efe2b3-400d-4a3c-a1ee-07180d729d84 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.903200] env[63379]: DEBUG oslo_vmware.api [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1551.903200] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5209cde7-bab2-d563-80aa-9bf50dcf61e0" [ 1551.903200] env[63379]: _type = "Task" [ 1551.903200] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.913595] env[63379]: DEBUG oslo_vmware.api [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5209cde7-bab2-d563-80aa-9bf50dcf61e0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.930792] env[63379]: DEBUG nova.policy [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '87416db304754ced85d79d6d30ca2241', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ce15a519ec5744feb0731439b2534fc0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1551.958867] env[63379]: DEBUG oslo_vmware.api [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Task: {'id': task-1779379, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.52304} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.958867] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 158fe346-93f5-422b-877a-8423547da58f/158fe346-93f5-422b-877a-8423547da58f.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1551.958867] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] [instance: 158fe346-93f5-422b-877a-8423547da58f] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1551.958867] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-087ac0b8-52ae-4b39-b513-4ec9d150e126 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.966607] env[63379]: DEBUG oslo_vmware.api [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Waiting for the task: (returnval){ [ 1551.966607] env[63379]: value = "task-1779382" [ 1551.966607] env[63379]: _type = "Task" [ 1551.966607] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.979423] env[63379]: DEBUG oslo_vmware.api [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Task: {'id': task-1779382, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.986871] env[63379]: DEBUG oslo_vmware.api [None req-aa947b5a-81ea-4a4c-92cd-ea50bfb162ea tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779377, 'name': CloneVM_Task} progress is 100%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.076040] env[63379]: DEBUG oslo_vmware.api [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Task: {'id': task-1779381, 'name': PowerOnVM_Task, 'duration_secs': 0.475875} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.076431] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1552.076776] env[63379]: INFO nova.compute.manager [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Took 9.64 seconds to spawn the instance on the hypervisor. [ 1552.076986] env[63379]: DEBUG nova.compute.manager [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1552.077835] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-947a23ab-47cb-4761-b275-c59e3b922630 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.112965] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquiring lock "refresh_cache-1d2de9da-9dfe-42d2-b206-bb5139b1970b" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1552.113281] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquired lock "refresh_cache-1d2de9da-9dfe-42d2-b206-bb5139b1970b" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1552.113535] env[63379]: DEBUG nova.network.neutron [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1552.329623] env[63379]: DEBUG nova.compute.manager [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 
3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1552.335624] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c03928b7-d94c-4f99-bb29-17cbc5e6a384 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.338379] env[63379]: DEBUG nova.network.neutron [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Successfully created port: 3d75d6b5-820e-43f4-b349-f7d9d2137fee {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1552.350315] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57080740-86e3-48e6-84e1-605139e1a137 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.389746] env[63379]: DEBUG nova.compute.manager [req-14748838-32e1-408a-9017-9286eb4c0ea1 req-0c238e3f-d1e5-4896-b875-0169ec96b86d service nova] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Detach interface failed, port_id=6af296da-afd1-49bb-b790-5fb012d68a2c, reason: Instance 650d4709-3cbc-4b9a-b165-66fa0af97c4d could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 1552.416497] env[63379]: DEBUG oslo_vmware.api [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5209cde7-bab2-d563-80aa-9bf50dcf61e0, 'name': SearchDatastore_Task, 'duration_secs': 0.014175} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.419372] env[63379]: DEBUG oslo_concurrency.lockutils [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1552.419708] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1552.419990] env[63379]: DEBUG oslo_concurrency.lockutils [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1552.420205] env[63379]: DEBUG oslo_concurrency.lockutils [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1552.420440] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1552.420918] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-25557196-2430-49ea-bd08-785589b5fdb4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.433178] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1552.433436] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1552.434254] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9319f869-93f5-4a59-8c36-d65c1014206a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.440924] env[63379]: DEBUG oslo_vmware.api [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1552.440924] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e0bb0b-c395-0eb8-034e-2f640cccfc9d" [ 1552.440924] env[63379]: _type = "Task" [ 1552.440924] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.452616] env[63379]: DEBUG oslo_vmware.api [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e0bb0b-c395-0eb8-034e-2f640cccfc9d, 'name': SearchDatastore_Task, 'duration_secs': 0.009493} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.453591] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24f55e08-33da-4d87-a7cb-bbdd3f21e42e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.462029] env[63379]: DEBUG oslo_vmware.api [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1552.462029] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b8140c-ba35-f449-767b-8c1b078b966d" [ 1552.462029] env[63379]: _type = "Task" [ 1552.462029] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.474785] env[63379]: DEBUG oslo_vmware.api [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b8140c-ba35-f449-767b-8c1b078b966d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.486443] env[63379]: DEBUG oslo_vmware.api [None req-aa947b5a-81ea-4a4c-92cd-ea50bfb162ea tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779377, 'name': CloneVM_Task, 'duration_secs': 1.539367} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.491334] env[63379]: INFO nova.virt.vmwareapi.vmops [None req-aa947b5a-81ea-4a4c-92cd-ea50bfb162ea tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Created linked-clone VM from snapshot [ 1552.491644] env[63379]: DEBUG oslo_vmware.api [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Task: {'id': task-1779382, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.10102} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.492515] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3211b3e5-064e-493f-8fec-99fbf2205177 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.495116] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] [instance: 158fe346-93f5-422b-877a-8423547da58f] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1552.495895] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aa8276c-26b8-450e-8255-f7679787be9d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.514893] env[63379]: DEBUG nova.virt.vmwareapi.images [None req-aa947b5a-81ea-4a4c-92cd-ea50bfb162ea tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Uploading image 806d4f4b-63da-4ae9-9dc9-7f928bfa54fa {{(pid=63379) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1552.525693] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] [instance: 158fe346-93f5-422b-877a-8423547da58f] Reconfiguring VM instance instance-0000002f to attach disk [datastore1] 158fe346-93f5-422b-877a-8423547da58f/158fe346-93f5-422b-877a-8423547da58f.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1552.528400] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-06933654-4bd8-4c55-9b36-2dc5f0df4d67 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.551635] env[63379]: DEBUG oslo_vmware.api [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Waiting for the task: (returnval){ [ 1552.551635] env[63379]: value = "task-1779383" [ 1552.551635] env[63379]: _type = "Task" [ 1552.551635] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.553933] env[63379]: DEBUG oslo_vmware.rw_handles [None req-aa947b5a-81ea-4a4c-92cd-ea50bfb162ea tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1552.553933] env[63379]: value = "vm-369355" [ 1552.553933] env[63379]: _type = "VirtualMachine" [ 1552.553933] env[63379]: }. {{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1552.553933] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-94c953ff-e7bd-40d1-a0a3-1ce991e9aa1d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.567050] env[63379]: DEBUG oslo_vmware.api [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Task: {'id': task-1779383, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.568919] env[63379]: DEBUG oslo_vmware.rw_handles [None req-aa947b5a-81ea-4a4c-92cd-ea50bfb162ea tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Lease: (returnval){ [ 1552.568919] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52682e0d-f451-64a5-0782-6b1a5774c81f" [ 1552.568919] env[63379]: _type = "HttpNfcLease" [ 1552.568919] env[63379]: } obtained for exporting VM: (result){ [ 1552.568919] env[63379]: value = "vm-369355" [ 1552.568919] env[63379]: _type = "VirtualMachine" [ 1552.568919] env[63379]: }. {{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1552.569212] env[63379]: DEBUG oslo_vmware.api [None req-aa947b5a-81ea-4a4c-92cd-ea50bfb162ea tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Waiting for the lease: (returnval){ [ 1552.569212] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52682e0d-f451-64a5-0782-6b1a5774c81f" [ 1552.569212] env[63379]: _type = "HttpNfcLease" [ 1552.569212] env[63379]: } to be ready. {{(pid=63379) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1552.578013] env[63379]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1552.578013] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52682e0d-f451-64a5-0782-6b1a5774c81f" [ 1552.578013] env[63379]: _type = "HttpNfcLease" [ 1552.578013] env[63379]: } is initializing. {{(pid=63379) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1552.598397] env[63379]: INFO nova.compute.manager [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Took 48.63 seconds to build instance. [ 1552.667517] env[63379]: DEBUG nova.network.neutron [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Instance cache missing network info. 
{{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1552.836162] env[63379]: DEBUG nova.compute.manager [req-929bf7c8-e940-44f5-9eca-455eb5eb34fd req-f22c6ec8-56cb-4358-b3bf-ca68d9af8e79 service nova] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Received event network-vif-plugged-8e6b3d77-6a88-493c-9ef0-bae55a6dbbc3 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1552.836472] env[63379]: DEBUG oslo_concurrency.lockutils [req-929bf7c8-e940-44f5-9eca-455eb5eb34fd req-f22c6ec8-56cb-4358-b3bf-ca68d9af8e79 service nova] Acquiring lock "1d2de9da-9dfe-42d2-b206-bb5139b1970b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1552.837047] env[63379]: DEBUG oslo_concurrency.lockutils [req-929bf7c8-e940-44f5-9eca-455eb5eb34fd req-f22c6ec8-56cb-4358-b3bf-ca68d9af8e79 service nova] Lock "1d2de9da-9dfe-42d2-b206-bb5139b1970b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1552.837047] env[63379]: DEBUG oslo_concurrency.lockutils [req-929bf7c8-e940-44f5-9eca-455eb5eb34fd req-f22c6ec8-56cb-4358-b3bf-ca68d9af8e79 service nova] Lock "1d2de9da-9dfe-42d2-b206-bb5139b1970b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1552.837184] env[63379]: DEBUG nova.compute.manager [req-929bf7c8-e940-44f5-9eca-455eb5eb34fd req-f22c6ec8-56cb-4358-b3bf-ca68d9af8e79 service nova] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] No waiting events found dispatching network-vif-plugged-8e6b3d77-6a88-493c-9ef0-bae55a6dbbc3 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1552.837400] env[63379]: WARNING nova.compute.manager [req-929bf7c8-e940-44f5-9eca-455eb5eb34fd req-f22c6ec8-56cb-4358-b3bf-ca68d9af8e79 service nova] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Received unexpected event network-vif-plugged-8e6b3d77-6a88-493c-9ef0-bae55a6dbbc3 for instance with vm_state building and task_state spawning. [ 1552.837572] env[63379]: DEBUG nova.compute.manager [req-929bf7c8-e940-44f5-9eca-455eb5eb34fd req-f22c6ec8-56cb-4358-b3bf-ca68d9af8e79 service nova] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Received event network-changed-8e6b3d77-6a88-493c-9ef0-bae55a6dbbc3 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1552.837740] env[63379]: DEBUG nova.compute.manager [req-929bf7c8-e940-44f5-9eca-455eb5eb34fd req-f22c6ec8-56cb-4358-b3bf-ca68d9af8e79 service nova] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Refreshing instance network info cache due to event network-changed-8e6b3d77-6a88-493c-9ef0-bae55a6dbbc3. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1552.837940] env[63379]: DEBUG oslo_concurrency.lockutils [req-929bf7c8-e940-44f5-9eca-455eb5eb34fd req-f22c6ec8-56cb-4358-b3bf-ca68d9af8e79 service nova] Acquiring lock "refresh_cache-1d2de9da-9dfe-42d2-b206-bb5139b1970b" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1552.853587] env[63379]: DEBUG nova.network.neutron [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Updating instance_info_cache with network_info: [{"id": "8e6b3d77-6a88-493c-9ef0-bae55a6dbbc3", "address": "fa:16:3e:01:39:f4", "network": {"id": "3a5c4f8e-5c7c-4623-90f8-f1b83e5b35f8", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-709139332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce15a519ec5744feb0731439b2534fc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e6b3d77-6a", "ovs_interfaceid": "8e6b3d77-6a88-493c-9ef0-bae55a6dbbc3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1552.923175] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-495fb530-df2c-4ff4-85d0-14ae065ce79d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.931773] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-252f3777-02ca-4562-9256-75fbfa3c0583 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.968909] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02241d9f-b9c6-4b00-9227-fb5f1f56f81a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.983785] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad3a708b-4f95-451b-aaf6-1837260ab45e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.987545] env[63379]: DEBUG oslo_vmware.api [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b8140c-ba35-f449-767b-8c1b078b966d, 'name': SearchDatastore_Task, 'duration_secs': 0.009704} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.987817] env[63379]: DEBUG oslo_concurrency.lockutils [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1552.988158] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] f10fe64d-a09e-488a-b609-3e38922cf2e0/f10fe64d-a09e-488a-b609-3e38922cf2e0.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1552.988728] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-39f11801-b60e-44f2-8eb3-4f2673c16d9d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.998509] env[63379]: DEBUG nova.compute.provider_tree [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1553.006028] env[63379]: DEBUG oslo_vmware.api [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1553.006028] env[63379]: value = "task-1779385" [ 1553.006028] env[63379]: _type = "Task" [ 1553.006028] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.015836] env[63379]: DEBUG oslo_vmware.api [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779385, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.064377] env[63379]: DEBUG oslo_vmware.api [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Task: {'id': task-1779383, 'name': ReconfigVM_Task, 'duration_secs': 0.425095} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1553.064722] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] [instance: 158fe346-93f5-422b-877a-8423547da58f] Reconfigured VM instance instance-0000002f to attach disk [datastore1] 158fe346-93f5-422b-877a-8423547da58f/158fe346-93f5-422b-877a-8423547da58f.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1553.065422] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0718ff97-3d35-4870-8199-9459c8248a4f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.073697] env[63379]: DEBUG oslo_vmware.api [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Waiting for the task: (returnval){ [ 1553.073697] env[63379]: value = "task-1779386" [ 1553.073697] env[63379]: _type = "Task" [ 1553.073697] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.079726] env[63379]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1553.079726] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52682e0d-f451-64a5-0782-6b1a5774c81f" [ 1553.079726] env[63379]: _type = "HttpNfcLease" [ 1553.079726] env[63379]: } is ready. {{(pid=63379) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1553.080432] env[63379]: DEBUG oslo_vmware.rw_handles [None req-aa947b5a-81ea-4a4c-92cd-ea50bfb162ea tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1553.080432] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52682e0d-f451-64a5-0782-6b1a5774c81f" [ 1553.080432] env[63379]: _type = "HttpNfcLease" [ 1553.080432] env[63379]: }. {{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1553.081313] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4ccb957-b516-4da4-937c-2a353c60ee5a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.086810] env[63379]: DEBUG oslo_vmware.api [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Task: {'id': task-1779386, 'name': Rename_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.092013] env[63379]: DEBUG oslo_vmware.rw_handles [None req-aa947b5a-81ea-4a4c-92cd-ea50bfb162ea tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521b794a-29d1-5841-b2e6-00db007f2e9b/disk-0.vmdk from lease info. 
{{(pid=63379) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1553.092282] env[63379]: DEBUG oslo_vmware.rw_handles [None req-aa947b5a-81ea-4a4c-92cd-ea50bfb162ea tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521b794a-29d1-5841-b2e6-00db007f2e9b/disk-0.vmdk for reading. {{(pid=63379) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1553.149236] env[63379]: DEBUG oslo_concurrency.lockutils [None req-137b5872-c7ec-4076-8873-595eabfd7868 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Lock "5aa36799-251b-4933-8ccd-8125995b1f8b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.367s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1553.347737] env[63379]: DEBUG nova.compute.manager [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1553.352343] env[63379]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-baf2f3ba-6c58-42e1-991c-3c605f3096eb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.356655] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Releasing lock "refresh_cache-1d2de9da-9dfe-42d2-b206-bb5139b1970b" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1553.356894] env[63379]: DEBUG nova.compute.manager [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Instance network_info: |[{"id": "8e6b3d77-6a88-493c-9ef0-bae55a6dbbc3", "address": "fa:16:3e:01:39:f4", "network": {"id": "3a5c4f8e-5c7c-4623-90f8-f1b83e5b35f8", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-709139332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce15a519ec5744feb0731439b2534fc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e6b3d77-6a", "ovs_interfaceid": "8e6b3d77-6a88-493c-9ef0-bae55a6dbbc3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, 
"meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1553.357232] env[63379]: DEBUG oslo_concurrency.lockutils [req-929bf7c8-e940-44f5-9eca-455eb5eb34fd req-f22c6ec8-56cb-4358-b3bf-ca68d9af8e79 service nova] Acquired lock "refresh_cache-1d2de9da-9dfe-42d2-b206-bb5139b1970b" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1553.357420] env[63379]: DEBUG nova.network.neutron [req-929bf7c8-e940-44f5-9eca-455eb5eb34fd req-f22c6ec8-56cb-4358-b3bf-ca68d9af8e79 service nova] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Refreshing network info cache for port 8e6b3d77-6a88-493c-9ef0-bae55a6dbbc3 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1553.358732] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:01:39:f4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '822050c7-1845-485d-b87e-73778d21c33c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8e6b3d77-6a88-493c-9ef0-bae55a6dbbc3', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1553.368577] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Creating folder: Project (ce15a519ec5744feb0731439b2534fc0). Parent ref: group-v369214. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1553.372783] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7e03cdfc-8282-4ed3-93c9-e4d335758476 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.384382] env[63379]: DEBUG nova.virt.hardware [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1553.384827] env[63379]: DEBUG nova.virt.hardware [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1553.385057] env[63379]: DEBUG 
nova.virt.hardware [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1553.385329] env[63379]: DEBUG nova.virt.hardware [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1553.385586] env[63379]: DEBUG nova.virt.hardware [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1553.385797] env[63379]: DEBUG nova.virt.hardware [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1553.386075] env[63379]: DEBUG nova.virt.hardware [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1553.386576] env[63379]: DEBUG nova.virt.hardware [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1553.386852] env[63379]: DEBUG nova.virt.hardware [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1553.387144] env[63379]: DEBUG nova.virt.hardware [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1553.391023] env[63379]: DEBUG nova.virt.hardware [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1553.391023] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db3a7a1a-ad9a-4d43-9d7d-bbf887887eaf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.396381] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 
tempest-ServerRescueNegativeTestJSON-240449381-project-member] Created folder: Project (ce15a519ec5744feb0731439b2534fc0) in parent group-v369214. [ 1553.396647] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Creating folder: Instances. Parent ref: group-v369357. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1553.397822] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f1677151-6e7c-4ba4-ad7c-9b7fb0086f23 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.406714] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b2af5ce-0744-48d5-bbce-cf7f02aa65fb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.416682] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Created folder: Instances in parent group-v369357. [ 1553.416948] env[63379]: DEBUG oslo.service.loopingcall [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1553.417217] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1553.417816] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d7f4e706-f20e-496b-8017-d805d2396a92 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.458225] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1553.458225] env[63379]: value = "task-1779389" [ 1553.458225] env[63379]: _type = "Task" [ 1553.458225] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.468783] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779389, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.503062] env[63379]: DEBUG nova.scheduler.client.report [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1553.516020] env[63379]: DEBUG oslo_vmware.api [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779385, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.496253} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1553.516677] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] f10fe64d-a09e-488a-b609-3e38922cf2e0/f10fe64d-a09e-488a-b609-3e38922cf2e0.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1553.516922] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1553.517225] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-17811555-4f3c-4c26-9235-3a1b0d743c07 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.524644] env[63379]: DEBUG oslo_vmware.api [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1553.524644] env[63379]: value = "task-1779390" [ 1553.524644] env[63379]: _type = "Task" [ 1553.524644] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.536355] env[63379]: DEBUG oslo_vmware.api [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779390, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.588267] env[63379]: DEBUG oslo_vmware.api [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Task: {'id': task-1779386, 'name': Rename_Task, 'duration_secs': 0.306179} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1553.588585] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] [instance: 158fe346-93f5-422b-877a-8423547da58f] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1553.588861] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-711e6c0c-a43b-43ad-8626-fe7851a38cb8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.595610] env[63379]: DEBUG oslo_vmware.api [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Waiting for the task: (returnval){ [ 1553.595610] env[63379]: value = "task-1779391" [ 1553.595610] env[63379]: _type = "Task" [ 1553.595610] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.604058] env[63379]: DEBUG oslo_vmware.api [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Task: {'id': task-1779391, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.652658] env[63379]: DEBUG nova.compute.manager [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Starting instance... 
{{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1553.752307] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0fdab32e-a2dc-4a7e-85d5-45265285a3a8 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Acquiring lock "5aa36799-251b-4933-8ccd-8125995b1f8b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1553.752644] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0fdab32e-a2dc-4a7e-85d5-45265285a3a8 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Lock "5aa36799-251b-4933-8ccd-8125995b1f8b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1553.752942] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0fdab32e-a2dc-4a7e-85d5-45265285a3a8 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Acquiring lock "5aa36799-251b-4933-8ccd-8125995b1f8b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1553.753217] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0fdab32e-a2dc-4a7e-85d5-45265285a3a8 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Lock "5aa36799-251b-4933-8ccd-8125995b1f8b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1553.753463] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0fdab32e-a2dc-4a7e-85d5-45265285a3a8 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Lock "5aa36799-251b-4933-8ccd-8125995b1f8b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1553.756344] env[63379]: INFO nova.compute.manager [None req-0fdab32e-a2dc-4a7e-85d5-45265285a3a8 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Terminating instance [ 1553.758773] env[63379]: DEBUG nova.compute.manager [None req-0fdab32e-a2dc-4a7e-85d5-45265285a3a8 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1553.758988] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0fdab32e-a2dc-4a7e-85d5-45265285a3a8 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1553.760040] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6e3b62e-bd80-48d9-928b-229578e5a4c6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.769151] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fdab32e-a2dc-4a7e-85d5-45265285a3a8 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1553.769820] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-df9f8816-13df-4672-8639-5df7c995d2fb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.777720] env[63379]: DEBUG oslo_vmware.api [None req-0fdab32e-a2dc-4a7e-85d5-45265285a3a8 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Waiting for the task: (returnval){ [ 1553.777720] env[63379]: value = "task-1779392" [ 1553.777720] env[63379]: _type = "Task" [ 1553.777720] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.787399] env[63379]: DEBUG oslo_vmware.api [None req-0fdab32e-a2dc-4a7e-85d5-45265285a3a8 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Task: {'id': task-1779392, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.974827] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779389, 'name': CreateVM_Task, 'duration_secs': 0.438582} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1553.975456] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1553.976480] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1553.977287] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1553.978372] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1553.978372] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91da5df2-21a4-453c-a189-dd459db02303 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.988068] env[63379]: DEBUG oslo_vmware.api [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1553.988068] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52559f9a-05a1-7a23-ac36-3a3e578c4f78" [ 1553.988068] env[63379]: _type = "Task" [ 1553.988068] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.996824] env[63379]: DEBUG oslo_vmware.api [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52559f9a-05a1-7a23-ac36-3a3e578c4f78, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.011190] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.701s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1554.012025] env[63379]: DEBUG nova.compute.manager [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1554.015357] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.670s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1554.017360] env[63379]: INFO nova.compute.claims [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1554.035891] env[63379]: DEBUG oslo_vmware.api [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779390, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069162} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1554.036447] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1554.038156] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b42a4f99-2279-49d4-b9f3-e003e1d23473 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.069706] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Reconfiguring VM instance instance-00000030 to attach disk [datastore1] f10fe64d-a09e-488a-b609-3e38922cf2e0/f10fe64d-a09e-488a-b609-3e38922cf2e0.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1554.070268] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dd38d9af-6b7d-4b47-a615-7a033a31adb4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.095473] env[63379]: DEBUG oslo_vmware.api [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1554.095473] env[63379]: value = "task-1779393" [ 1554.095473] env[63379]: _type = "Task" [ 1554.095473] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1554.110629] env[63379]: DEBUG oslo_vmware.api [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Task: {'id': task-1779391, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.115540] env[63379]: DEBUG oslo_vmware.api [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779393, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.188526] env[63379]: DEBUG oslo_concurrency.lockutils [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1554.291221] env[63379]: DEBUG oslo_vmware.api [None req-0fdab32e-a2dc-4a7e-85d5-45265285a3a8 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Task: {'id': task-1779392, 'name': PowerOffVM_Task, 'duration_secs': 0.232616} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1554.291594] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fdab32e-a2dc-4a7e-85d5-45265285a3a8 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1554.291721] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0fdab32e-a2dc-4a7e-85d5-45265285a3a8 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1554.291954] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-28431401-bfa8-4b43-a1f9-b918a7265be2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.379831] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0fdab32e-a2dc-4a7e-85d5-45265285a3a8 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1554.380113] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0fdab32e-a2dc-4a7e-85d5-45265285a3a8 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1554.380324] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-0fdab32e-a2dc-4a7e-85d5-45265285a3a8 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Deleting the datastore file [datastore1] 5aa36799-251b-4933-8ccd-8125995b1f8b {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1554.380598] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6e81acfc-23ff-4469-a6c6-9101354c1ee5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.387698] env[63379]: DEBUG nova.network.neutron [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Successfully updated port: 3d75d6b5-820e-43f4-b349-f7d9d2137fee {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1554.401009] env[63379]: DEBUG oslo_vmware.api [None req-0fdab32e-a2dc-4a7e-85d5-45265285a3a8 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Waiting for the task: (returnval){ [ 1554.401009] env[63379]: value = "task-1779395" [ 1554.401009] env[63379]: _type = "Task" [ 1554.401009] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1554.419294] env[63379]: DEBUG oslo_vmware.api [None req-0fdab32e-a2dc-4a7e-85d5-45265285a3a8 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Task: {'id': task-1779395, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.498354] env[63379]: DEBUG oslo_vmware.api [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52559f9a-05a1-7a23-ac36-3a3e578c4f78, 'name': SearchDatastore_Task, 'duration_secs': 0.015093} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1554.498779] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1554.499112] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1554.499517] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1554.499686] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1554.499878] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1554.500164] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-65378e71-2d92-4407-bbdd-b0272ff0fcc5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.506393] env[63379]: DEBUG nova.network.neutron [req-929bf7c8-e940-44f5-9eca-455eb5eb34fd req-f22c6ec8-56cb-4358-b3bf-ca68d9af8e79 service nova] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Updated VIF entry in instance network 
info cache for port 8e6b3d77-6a88-493c-9ef0-bae55a6dbbc3. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1554.506737] env[63379]: DEBUG nova.network.neutron [req-929bf7c8-e940-44f5-9eca-455eb5eb34fd req-f22c6ec8-56cb-4358-b3bf-ca68d9af8e79 service nova] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Updating instance_info_cache with network_info: [{"id": "8e6b3d77-6a88-493c-9ef0-bae55a6dbbc3", "address": "fa:16:3e:01:39:f4", "network": {"id": "3a5c4f8e-5c7c-4623-90f8-f1b83e5b35f8", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-709139332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce15a519ec5744feb0731439b2534fc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e6b3d77-6a", "ovs_interfaceid": "8e6b3d77-6a88-493c-9ef0-bae55a6dbbc3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1554.520093] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1554.520377] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1554.522397] env[63379]: DEBUG nova.compute.utils [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1554.525987] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48eda85a-e80b-4890-b5b4-20209d36bf8a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.531028] env[63379]: DEBUG nova.compute.manager [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1554.531028] env[63379]: DEBUG nova.network.neutron [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1554.539448] env[63379]: DEBUG oslo_vmware.api [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1554.539448] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]528c04b4-11dc-5188-0a75-7fdfdcf00039" [ 1554.539448] env[63379]: _type = "Task" [ 1554.539448] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1554.554366] env[63379]: DEBUG oslo_vmware.api [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]528c04b4-11dc-5188-0a75-7fdfdcf00039, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.612807] env[63379]: DEBUG oslo_vmware.api [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Task: {'id': task-1779391, 'name': PowerOnVM_Task, 'duration_secs': 0.627655} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1554.618399] env[63379]: DEBUG nova.policy [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5fa958cb524741079d651e388f00f3c4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '551ba9289da4445ea0bad784aee2e86d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1554.619569] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] [instance: 158fe346-93f5-422b-877a-8423547da58f] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1554.620157] env[63379]: INFO nova.compute.manager [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] [instance: 158fe346-93f5-422b-877a-8423547da58f] Took 9.46 seconds to spawn the instance on the hypervisor. 
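Aside: the PowerOnVM_Task, ReconfigVM_Task and SearchDatastore_Task entries above all follow the same oslo.vmware pattern: invoke a vCenter task, then poll it (wait_for_task / _poll_task) until it reports success or error, logging progress along the way. The snippet below is a minimal stand-alone Python sketch of that poll loop for illustration only; it is not oslo.vmware code, and the state names, poll interval and the fake task used in the usage example are assumptions.

    import time

    def wait_for_task(poll_status, interval=0.5):
        """Poll a task-status callable until the task finishes.

        poll_status is any callable returning (state, progress); the states
        are assumed to be 'queued', 'running', 'success' or 'error'.
        """
        while True:
            state, progress = poll_status()
            print('task progress is %d%%' % progress)  # mirrors the _poll_task log lines
            if state == 'success':
                return
            if state == 'error':
                raise RuntimeError('task failed')
            time.sleep(interval)

    # Usage with a fake task that succeeds on the third poll:
    states = iter([('running', 0), ('running', 89), ('success', 100)])
    wait_for_task(lambda: next(states), interval=0.1)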
[ 1554.620965] env[63379]: DEBUG nova.compute.manager [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] [instance: 158fe346-93f5-422b-877a-8423547da58f] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1554.621322] env[63379]: DEBUG oslo_vmware.api [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779393, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.622526] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a278d498-2025-4b2b-875c-4dcd6cbec2b5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.892797] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquiring lock "refresh_cache-3b662a31-76b9-4ac8-a6bd-bc4983f7fec9" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1554.893029] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquired lock "refresh_cache-3b662a31-76b9-4ac8-a6bd-bc4983f7fec9" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1554.893770] env[63379]: DEBUG nova.network.neutron [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1554.914840] env[63379]: DEBUG oslo_vmware.api [None req-0fdab32e-a2dc-4a7e-85d5-45265285a3a8 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Task: {'id': task-1779395, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.464986} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1554.915405] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-0fdab32e-a2dc-4a7e-85d5-45265285a3a8 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1554.915769] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0fdab32e-a2dc-4a7e-85d5-45265285a3a8 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1554.916049] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0fdab32e-a2dc-4a7e-85d5-45265285a3a8 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1554.916357] env[63379]: INFO nova.compute.manager [None req-0fdab32e-a2dc-4a7e-85d5-45265285a3a8 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1554.916847] env[63379]: DEBUG oslo.service.loopingcall [None req-0fdab32e-a2dc-4a7e-85d5-45265285a3a8 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1554.917083] env[63379]: DEBUG nova.compute.manager [-] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1554.917286] env[63379]: DEBUG nova.network.neutron [-] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1554.971432] env[63379]: DEBUG nova.compute.manager [req-09a2b25c-ee80-45e0-a5a9-fd751df57d6d req-63839669-72cd-4d20-b2e3-ab25248f9f9f service nova] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Received event network-vif-plugged-3d75d6b5-820e-43f4-b349-f7d9d2137fee {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1554.971723] env[63379]: DEBUG oslo_concurrency.lockutils [req-09a2b25c-ee80-45e0-a5a9-fd751df57d6d req-63839669-72cd-4d20-b2e3-ab25248f9f9f service nova] Acquiring lock "3b662a31-76b9-4ac8-a6bd-bc4983f7fec9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1554.972592] env[63379]: DEBUG oslo_concurrency.lockutils [req-09a2b25c-ee80-45e0-a5a9-fd751df57d6d req-63839669-72cd-4d20-b2e3-ab25248f9f9f service nova] Lock "3b662a31-76b9-4ac8-a6bd-bc4983f7fec9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1554.973519] env[63379]: DEBUG oslo_concurrency.lockutils [req-09a2b25c-ee80-45e0-a5a9-fd751df57d6d req-63839669-72cd-4d20-b2e3-ab25248f9f9f service nova] Lock "3b662a31-76b9-4ac8-a6bd-bc4983f7fec9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1554.973834] env[63379]: DEBUG nova.compute.manager [req-09a2b25c-ee80-45e0-a5a9-fd751df57d6d req-63839669-72cd-4d20-b2e3-ab25248f9f9f service nova] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] No waiting events found dispatching network-vif-plugged-3d75d6b5-820e-43f4-b349-f7d9d2137fee {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1554.974245] env[63379]: WARNING nova.compute.manager [req-09a2b25c-ee80-45e0-a5a9-fd751df57d6d req-63839669-72cd-4d20-b2e3-ab25248f9f9f service nova] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Received unexpected event network-vif-plugged-3d75d6b5-820e-43f4-b349-f7d9d2137fee for instance with vm_state building and task_state spawning. [ 1554.974604] env[63379]: DEBUG nova.compute.manager [req-09a2b25c-ee80-45e0-a5a9-fd751df57d6d req-63839669-72cd-4d20-b2e3-ab25248f9f9f service nova] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Received event network-changed-3d75d6b5-820e-43f4-b349-f7d9d2137fee {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1554.974930] env[63379]: DEBUG nova.compute.manager [req-09a2b25c-ee80-45e0-a5a9-fd751df57d6d req-63839669-72cd-4d20-b2e3-ab25248f9f9f service nova] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Refreshing instance network info cache due to event network-changed-3d75d6b5-820e-43f4-b349-f7d9d2137fee. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1554.975373] env[63379]: DEBUG oslo_concurrency.lockutils [req-09a2b25c-ee80-45e0-a5a9-fd751df57d6d req-63839669-72cd-4d20-b2e3-ab25248f9f9f service nova] Acquiring lock "refresh_cache-3b662a31-76b9-4ac8-a6bd-bc4983f7fec9" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1555.011703] env[63379]: DEBUG oslo_concurrency.lockutils [req-929bf7c8-e940-44f5-9eca-455eb5eb34fd req-f22c6ec8-56cb-4358-b3bf-ca68d9af8e79 service nova] Releasing lock "refresh_cache-1d2de9da-9dfe-42d2-b206-bb5139b1970b" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1555.033311] env[63379]: DEBUG nova.compute.manager [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1555.036497] env[63379]: DEBUG nova.network.neutron [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Successfully created port: bbe843e8-9156-454e-8ba4-dae6bc31c8b2 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1555.051692] env[63379]: DEBUG oslo_vmware.api [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]528c04b4-11dc-5188-0a75-7fdfdcf00039, 'name': SearchDatastore_Task, 'duration_secs': 0.01499} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.052755] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3296e9a5-7530-47e3-b742-0fbd21ed62ef {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.059117] env[63379]: DEBUG oslo_vmware.api [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1555.059117] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f62210-20b9-295d-1ce9-818bbbaec7e4" [ 1555.059117] env[63379]: _type = "Task" [ 1555.059117] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.073104] env[63379]: DEBUG oslo_vmware.api [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f62210-20b9-295d-1ce9-818bbbaec7e4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.111220] env[63379]: DEBUG oslo_vmware.api [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779393, 'name': ReconfigVM_Task, 'duration_secs': 0.528369} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.112028] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Reconfigured VM instance instance-00000030 to attach disk [datastore1] f10fe64d-a09e-488a-b609-3e38922cf2e0/f10fe64d-a09e-488a-b609-3e38922cf2e0.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1555.112198] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bb0d042e-2000-457c-ac5e-df0ef98d8856 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.121476] env[63379]: DEBUG oslo_vmware.api [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1555.121476] env[63379]: value = "task-1779396" [ 1555.121476] env[63379]: _type = "Task" [ 1555.121476] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.130782] env[63379]: DEBUG oslo_vmware.api [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779396, 'name': Rename_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.151604] env[63379]: INFO nova.compute.manager [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] [instance: 158fe346-93f5-422b-877a-8423547da58f] Took 47.06 seconds to build instance. [ 1555.438811] env[63379]: DEBUG nova.network.neutron [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1555.571582] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1168cdfb-8ea9-4643-828f-0f96e9f2eb70 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.582243] env[63379]: DEBUG oslo_vmware.api [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f62210-20b9-295d-1ce9-818bbbaec7e4, 'name': SearchDatastore_Task, 'duration_secs': 0.024375} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.582243] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1555.582243] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 1d2de9da-9dfe-42d2-b206-bb5139b1970b/1d2de9da-9dfe-42d2-b206-bb5139b1970b.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1555.582243] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-23de2a94-ad28-44af-a41e-9fce5e86290d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.590019] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6629e5e2-c7b6-4ff4-ab62-32cd78ba7686 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.594646] env[63379]: DEBUG oslo_vmware.api [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1555.594646] env[63379]: value = "task-1779397" [ 1555.594646] env[63379]: _type = "Task" [ 1555.594646] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.636836] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8df2da7-f87c-4526-8f8f-ae89a4bccc5b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.646223] env[63379]: DEBUG oslo_vmware.api [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779397, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.656736] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3d9d4f6-0a45-4692-9caf-d9e2bdc26d71 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.662308] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c6470f4e-57f7-451d-a4eb-ee32610bc05b tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Lock "158fe346-93f5-422b-877a-8423547da58f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.057s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1555.662939] env[63379]: DEBUG oslo_vmware.api [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779396, 'name': Rename_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.682030] env[63379]: DEBUG nova.compute.provider_tree [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1555.827065] env[63379]: DEBUG nova.network.neutron [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Updating instance_info_cache with network_info: [{"id": "3d75d6b5-820e-43f4-b349-f7d9d2137fee", "address": "fa:16:3e:00:b3:c0", "network": {"id": "3a5c4f8e-5c7c-4623-90f8-f1b83e5b35f8", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-709139332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce15a519ec5744feb0731439b2534fc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d75d6b5-82", "ovs_interfaceid": "3d75d6b5-820e-43f4-b349-f7d9d2137fee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1555.950463] env[63379]: DEBUG nova.network.neutron [-] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1556.050716] env[63379]: DEBUG nova.compute.manager [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf 
tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1556.078205] env[63379]: DEBUG nova.virt.hardware [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:30:29Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='fd63d07f-2af7-4c40-ac44-c2f8123389ab',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-418275153',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1556.078516] env[63379]: DEBUG nova.virt.hardware [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1556.078793] env[63379]: DEBUG nova.virt.hardware [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1556.079799] env[63379]: DEBUG nova.virt.hardware [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1556.079799] env[63379]: DEBUG nova.virt.hardware [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1556.079799] env[63379]: DEBUG nova.virt.hardware [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1556.079965] env[63379]: DEBUG nova.virt.hardware [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1556.080165] env[63379]: DEBUG nova.virt.hardware [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 
tempest-MigrationsAdminTest-2108201557-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1556.080324] env[63379]: DEBUG nova.virt.hardware [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1556.080660] env[63379]: DEBUG nova.virt.hardware [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1556.080660] env[63379]: DEBUG nova.virt.hardware [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1556.081595] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2114bc3d-5910-418c-b459-e603b5f88e52 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.096275] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c38c6678-8315-4f13-a471-71f5279fc66f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.124249] env[63379]: DEBUG oslo_vmware.api [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779397, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.138902] env[63379]: DEBUG oslo_vmware.api [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779396, 'name': Rename_Task, 'duration_secs': 0.605709} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1556.138902] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1556.139116] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-176abfa9-bbd4-4e9c-bba5-0c5fc0010e98 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.146177] env[63379]: DEBUG oslo_vmware.api [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1556.146177] env[63379]: value = "task-1779398" [ 1556.146177] env[63379]: _type = "Task" [ 1556.146177] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1556.155426] env[63379]: DEBUG oslo_vmware.api [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779398, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.166488] env[63379]: DEBUG nova.compute.manager [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1556.186325] env[63379]: DEBUG nova.scheduler.client.report [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1556.240086] env[63379]: DEBUG nova.compute.manager [req-edff8f66-5228-41ac-86b0-f20e3185d6c0 req-273c365c-c38e-47cf-8374-ec55b81c01d4 service nova] [instance: 158fe346-93f5-422b-877a-8423547da58f] Received event network-changed-ce8dbca6-e4fa-47a3-b501-18973a50219c {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1556.240327] env[63379]: DEBUG nova.compute.manager [req-edff8f66-5228-41ac-86b0-f20e3185d6c0 req-273c365c-c38e-47cf-8374-ec55b81c01d4 service nova] [instance: 158fe346-93f5-422b-877a-8423547da58f] Refreshing instance network info cache due to event network-changed-ce8dbca6-e4fa-47a3-b501-18973a50219c. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1556.240615] env[63379]: DEBUG oslo_concurrency.lockutils [req-edff8f66-5228-41ac-86b0-f20e3185d6c0 req-273c365c-c38e-47cf-8374-ec55b81c01d4 service nova] Acquiring lock "refresh_cache-158fe346-93f5-422b-877a-8423547da58f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1556.240802] env[63379]: DEBUG oslo_concurrency.lockutils [req-edff8f66-5228-41ac-86b0-f20e3185d6c0 req-273c365c-c38e-47cf-8374-ec55b81c01d4 service nova] Acquired lock "refresh_cache-158fe346-93f5-422b-877a-8423547da58f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1556.241498] env[63379]: DEBUG nova.network.neutron [req-edff8f66-5228-41ac-86b0-f20e3185d6c0 req-273c365c-c38e-47cf-8374-ec55b81c01d4 service nova] [instance: 158fe346-93f5-422b-877a-8423547da58f] Refreshing network info cache for port ce8dbca6-e4fa-47a3-b501-18973a50219c {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1556.331769] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Releasing lock "refresh_cache-3b662a31-76b9-4ac8-a6bd-bc4983f7fec9" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1556.332076] env[63379]: DEBUG nova.compute.manager [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Instance network_info: |[{"id": "3d75d6b5-820e-43f4-b349-f7d9d2137fee", "address": "fa:16:3e:00:b3:c0", "network": {"id": "3a5c4f8e-5c7c-4623-90f8-f1b83e5b35f8", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-709139332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce15a519ec5744feb0731439b2534fc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d75d6b5-82", "ovs_interfaceid": "3d75d6b5-820e-43f4-b349-f7d9d2137fee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1556.332405] env[63379]: DEBUG oslo_concurrency.lockutils [req-09a2b25c-ee80-45e0-a5a9-fd751df57d6d req-63839669-72cd-4d20-b2e3-ab25248f9f9f service nova] Acquired lock "refresh_cache-3b662a31-76b9-4ac8-a6bd-bc4983f7fec9" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1556.332593] env[63379]: DEBUG nova.network.neutron [req-09a2b25c-ee80-45e0-a5a9-fd751df57d6d req-63839669-72cd-4d20-b2e3-ab25248f9f9f service nova] [instance: 
3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Refreshing network info cache for port 3d75d6b5-820e-43f4-b349-f7d9d2137fee {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1556.333874] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:00:b3:c0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '822050c7-1845-485d-b87e-73778d21c33c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3d75d6b5-820e-43f4-b349-f7d9d2137fee', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1556.342030] env[63379]: DEBUG oslo.service.loopingcall [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1556.344992] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1556.345526] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f71e3846-3c94-4eea-862c-ca15a6f3eee9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.368724] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1556.368724] env[63379]: value = "task-1779399" [ 1556.368724] env[63379]: _type = "Task" [ 1556.368724] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1556.380116] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779399, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.453479] env[63379]: INFO nova.compute.manager [-] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Took 1.54 seconds to deallocate network for instance. [ 1556.606721] env[63379]: DEBUG nova.network.neutron [req-09a2b25c-ee80-45e0-a5a9-fd751df57d6d req-63839669-72cd-4d20-b2e3-ab25248f9f9f service nova] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Updated VIF entry in instance network info cache for port 3d75d6b5-820e-43f4-b349-f7d9d2137fee. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1556.607107] env[63379]: DEBUG nova.network.neutron [req-09a2b25c-ee80-45e0-a5a9-fd751df57d6d req-63839669-72cd-4d20-b2e3-ab25248f9f9f service nova] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Updating instance_info_cache with network_info: [{"id": "3d75d6b5-820e-43f4-b349-f7d9d2137fee", "address": "fa:16:3e:00:b3:c0", "network": {"id": "3a5c4f8e-5c7c-4623-90f8-f1b83e5b35f8", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-709139332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce15a519ec5744feb0731439b2534fc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d75d6b5-82", "ovs_interfaceid": "3d75d6b5-820e-43f4-b349-f7d9d2137fee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1556.615707] env[63379]: DEBUG oslo_vmware.api [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779397, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.566547} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1556.615994] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 1d2de9da-9dfe-42d2-b206-bb5139b1970b/1d2de9da-9dfe-42d2-b206-bb5139b1970b.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1556.616513] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1556.616804] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-40a13537-76a4-41ca-8f7e-eb577f1b05d6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.627171] env[63379]: DEBUG oslo_vmware.api [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1556.627171] env[63379]: value = "task-1779400" [ 1556.627171] env[63379]: _type = "Task" [ 1556.627171] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1556.639330] env[63379]: DEBUG oslo_vmware.api [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779400, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.667563] env[63379]: DEBUG oslo_vmware.api [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779398, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.692458] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.677s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1556.693426] env[63379]: DEBUG nova.compute.manager [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1556.698817] env[63379]: DEBUG oslo_concurrency.lockutils [None req-de9a04ff-b298-46a2-a268-8f5ae7ba0235 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.729s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1556.699950] env[63379]: DEBUG nova.objects.instance [None req-de9a04ff-b298-46a2-a268-8f5ae7ba0235 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Lazy-loading 'resources' on Instance uuid 2f98800d-800f-4ad7-bd65-f12879f02ce5 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1556.704206] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1556.887597] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779399, 'name': CreateVM_Task, 'duration_secs': 0.409247} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1556.887597] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1556.888162] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1556.888318] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1556.888792] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1556.889096] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c08ac6be-ac21-42f9-b241-ebff62ae4569 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.898272] env[63379]: DEBUG oslo_vmware.api [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 
1556.898272] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c01d8a-1fec-3291-72db-f2655525d47a" [ 1556.898272] env[63379]: _type = "Task" [ 1556.898272] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1556.914914] env[63379]: DEBUG oslo_vmware.api [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c01d8a-1fec-3291-72db-f2655525d47a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.944069] env[63379]: DEBUG nova.network.neutron [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Successfully updated port: bbe843e8-9156-454e-8ba4-dae6bc31c8b2 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1556.960188] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0fdab32e-a2dc-4a7e-85d5-45265285a3a8 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1557.077325] env[63379]: DEBUG nova.network.neutron [req-edff8f66-5228-41ac-86b0-f20e3185d6c0 req-273c365c-c38e-47cf-8374-ec55b81c01d4 service nova] [instance: 158fe346-93f5-422b-877a-8423547da58f] Updated VIF entry in instance network info cache for port ce8dbca6-e4fa-47a3-b501-18973a50219c. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1557.077745] env[63379]: DEBUG nova.network.neutron [req-edff8f66-5228-41ac-86b0-f20e3185d6c0 req-273c365c-c38e-47cf-8374-ec55b81c01d4 service nova] [instance: 158fe346-93f5-422b-877a-8423547da58f] Updating instance_info_cache with network_info: [{"id": "ce8dbca6-e4fa-47a3-b501-18973a50219c", "address": "fa:16:3e:db:bf:0c", "network": {"id": "ed24d0da-9874-4940-bc5e-1aa29c68ce84", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-2101419788-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.129", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "692581dc5dda4b3b94565dadcd06ec38", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce8dbca6-e4", "ovs_interfaceid": "ce8dbca6-e4fa-47a3-b501-18973a50219c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1557.109881] env[63379]: DEBUG oslo_concurrency.lockutils [req-09a2b25c-ee80-45e0-a5a9-fd751df57d6d req-63839669-72cd-4d20-b2e3-ab25248f9f9f service nova] Releasing lock "refresh_cache-3b662a31-76b9-4ac8-a6bd-bc4983f7fec9" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1557.113646] env[63379]: DEBUG nova.compute.manager [req-6d0b0017-c306-41cb-8c2a-12f2dcf5a203 req-9cca6d20-ef4c-4ad6-9436-80797f9f9431 service nova] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Received event network-vif-deleted-9c772f89-9b5d-4518-ac94-8d61ecb706db {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1557.113884] env[63379]: DEBUG nova.compute.manager [req-6d0b0017-c306-41cb-8c2a-12f2dcf5a203 req-9cca6d20-ef4c-4ad6-9436-80797f9f9431 service nova] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Received event network-vif-plugged-bbe843e8-9156-454e-8ba4-dae6bc31c8b2 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1557.114839] env[63379]: DEBUG oslo_concurrency.lockutils [req-6d0b0017-c306-41cb-8c2a-12f2dcf5a203 req-9cca6d20-ef4c-4ad6-9436-80797f9f9431 service nova] Acquiring lock "f082cdd7-228e-4100-b301-5af6daea9b36-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1557.115102] env[63379]: DEBUG oslo_concurrency.lockutils [req-6d0b0017-c306-41cb-8c2a-12f2dcf5a203 req-9cca6d20-ef4c-4ad6-9436-80797f9f9431 service nova] Lock "f082cdd7-228e-4100-b301-5af6daea9b36-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1557.115455] env[63379]: DEBUG oslo_concurrency.lockutils [req-6d0b0017-c306-41cb-8c2a-12f2dcf5a203 req-9cca6d20-ef4c-4ad6-9436-80797f9f9431 service nova] Lock "f082cdd7-228e-4100-b301-5af6daea9b36-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1557.115676] env[63379]: DEBUG nova.compute.manager [req-6d0b0017-c306-41cb-8c2a-12f2dcf5a203 req-9cca6d20-ef4c-4ad6-9436-80797f9f9431 service nova] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] No waiting events found dispatching network-vif-plugged-bbe843e8-9156-454e-8ba4-dae6bc31c8b2 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1557.115862] env[63379]: WARNING nova.compute.manager [req-6d0b0017-c306-41cb-8c2a-12f2dcf5a203 req-9cca6d20-ef4c-4ad6-9436-80797f9f9431 service nova] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Received unexpected event network-vif-plugged-bbe843e8-9156-454e-8ba4-dae6bc31c8b2 for instance with vm_state building and task_state spawning. [ 1557.116052] env[63379]: DEBUG nova.compute.manager [req-6d0b0017-c306-41cb-8c2a-12f2dcf5a203 req-9cca6d20-ef4c-4ad6-9436-80797f9f9431 service nova] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Received event network-changed-bbe843e8-9156-454e-8ba4-dae6bc31c8b2 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1557.116213] env[63379]: DEBUG nova.compute.manager [req-6d0b0017-c306-41cb-8c2a-12f2dcf5a203 req-9cca6d20-ef4c-4ad6-9436-80797f9f9431 service nova] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Refreshing instance network info cache due to event network-changed-bbe843e8-9156-454e-8ba4-dae6bc31c8b2. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1557.116439] env[63379]: DEBUG oslo_concurrency.lockutils [req-6d0b0017-c306-41cb-8c2a-12f2dcf5a203 req-9cca6d20-ef4c-4ad6-9436-80797f9f9431 service nova] Acquiring lock "refresh_cache-f082cdd7-228e-4100-b301-5af6daea9b36" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1557.116594] env[63379]: DEBUG oslo_concurrency.lockutils [req-6d0b0017-c306-41cb-8c2a-12f2dcf5a203 req-9cca6d20-ef4c-4ad6-9436-80797f9f9431 service nova] Acquired lock "refresh_cache-f082cdd7-228e-4100-b301-5af6daea9b36" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1557.116755] env[63379]: DEBUG nova.network.neutron [req-6d0b0017-c306-41cb-8c2a-12f2dcf5a203 req-9cca6d20-ef4c-4ad6-9436-80797f9f9431 service nova] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Refreshing network info cache for port bbe843e8-9156-454e-8ba4-dae6bc31c8b2 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1557.137588] env[63379]: DEBUG oslo_vmware.api [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779400, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.144683} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1557.137814] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1557.138648] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d12a9191-044e-48e5-8d8b-461f2cda751b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.164629] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Reconfiguring VM instance instance-00000031 to attach disk [datastore1] 1d2de9da-9dfe-42d2-b206-bb5139b1970b/1d2de9da-9dfe-42d2-b206-bb5139b1970b.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1557.168111] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6ef85dd1-27e3-4519-84b6-09a8003834da {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.191051] env[63379]: DEBUG oslo_vmware.api [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779398, 'name': PowerOnVM_Task, 'duration_secs': 0.882312} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1557.192570] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1557.192883] env[63379]: INFO nova.compute.manager [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Took 9.27 seconds to spawn the instance on the hypervisor. [ 1557.192989] env[63379]: DEBUG nova.compute.manager [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1557.193505] env[63379]: DEBUG oslo_vmware.api [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1557.193505] env[63379]: value = "task-1779401" [ 1557.193505] env[63379]: _type = "Task" [ 1557.193505] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1557.194304] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4489539a-f267-4083-adf7-45590fb6135b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.208580] env[63379]: DEBUG nova.compute.utils [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1557.210235] env[63379]: DEBUG oslo_vmware.api [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779401, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.212064] env[63379]: DEBUG nova.compute.manager [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1557.212251] env[63379]: DEBUG nova.network.neutron [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1557.272747] env[63379]: DEBUG nova.policy [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '919743457d6845ddb3f34a321dc963c5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fceda42cf54845eab8068573e0f8eb26', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1557.412302] env[63379]: DEBUG oslo_vmware.api [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c01d8a-1fec-3291-72db-f2655525d47a, 'name': SearchDatastore_Task, 'duration_secs': 0.015839} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1557.415487] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1557.415778] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1557.416100] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1557.416462] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1557.416784] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1557.417380] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8f1a8252-9814-4012-b1cd-53a6ad674530 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.428418] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1557.428831] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1557.429657] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d503b944-889c-4ccf-bb3d-5d3a505eb079 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.439793] env[63379]: DEBUG oslo_vmware.api [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1557.439793] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52fcbbcc-ad24-9a80-cb44-a72148068823" [ 1557.439793] env[63379]: _type = "Task" [ 1557.439793] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1557.447800] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Acquiring lock "refresh_cache-f082cdd7-228e-4100-b301-5af6daea9b36" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1557.451586] env[63379]: DEBUG oslo_vmware.api [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52fcbbcc-ad24-9a80-cb44-a72148068823, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.580151] env[63379]: DEBUG oslo_concurrency.lockutils [req-edff8f66-5228-41ac-86b0-f20e3185d6c0 req-273c365c-c38e-47cf-8374-ec55b81c01d4 service nova] Releasing lock "refresh_cache-158fe346-93f5-422b-877a-8423547da58f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1557.595698] env[63379]: DEBUG nova.network.neutron [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Successfully created port: 2ac41cb5-759a-42a6-a664-26ad0cc81d81 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1557.655985] env[63379]: DEBUG nova.network.neutron [req-6d0b0017-c306-41cb-8c2a-12f2dcf5a203 req-9cca6d20-ef4c-4ad6-9436-80797f9f9431 service nova] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1557.708926] env[63379]: DEBUG oslo_vmware.api [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779401, 'name': ReconfigVM_Task, 'duration_secs': 0.308319} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1557.709879] env[63379]: DEBUG nova.network.neutron [req-6d0b0017-c306-41cb-8c2a-12f2dcf5a203 req-9cca6d20-ef4c-4ad6-9436-80797f9f9431 service nova] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1557.711054] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Reconfigured VM instance instance-00000031 to attach disk [datastore1] 1d2de9da-9dfe-42d2-b206-bb5139b1970b/1d2de9da-9dfe-42d2-b206-bb5139b1970b.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1557.711842] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-086cf258-e340-4761-94ba-096a052575c6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.717957] env[63379]: DEBUG nova.compute.manager [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1557.726761] env[63379]: DEBUG oslo_vmware.api [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1557.726761] env[63379]: value = "task-1779402" [ 1557.726761] env[63379]: _type = "Task" [ 1557.726761] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1557.728052] env[63379]: INFO nova.compute.manager [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Took 47.53 seconds to build instance. [ 1557.742900] env[63379]: DEBUG oslo_vmware.api [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779402, 'name': Rename_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.744651] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-330d2b37-0795-4907-b84f-11be8f54b483 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.754042] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7eb7bf5-531a-47c2-96ec-b3499588454e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.790323] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bed8514-2979-4276-84e2-702670489d6f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.802514] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a1ec201-f7b2-452d-941c-fc230c6cc4fa {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.818998] env[63379]: DEBUG nova.compute.provider_tree [None req-de9a04ff-b298-46a2-a268-8f5ae7ba0235 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1557.953819] env[63379]: DEBUG oslo_vmware.api [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52fcbbcc-ad24-9a80-cb44-a72148068823, 'name': SearchDatastore_Task, 'duration_secs': 0.022176} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1557.954882] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c311c493-122c-47f6-be0b-14936578c4ee {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.961041] env[63379]: DEBUG oslo_vmware.api [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1557.961041] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5207f6e2-f74c-dc2c-95c9-5eb358f3ff04" [ 1557.961041] env[63379]: _type = "Task" [ 1557.961041] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1557.969767] env[63379]: DEBUG oslo_vmware.api [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5207f6e2-f74c-dc2c-95c9-5eb358f3ff04, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.212483] env[63379]: DEBUG oslo_concurrency.lockutils [req-6d0b0017-c306-41cb-8c2a-12f2dcf5a203 req-9cca6d20-ef4c-4ad6-9436-80797f9f9431 service nova] Releasing lock "refresh_cache-f082cdd7-228e-4100-b301-5af6daea9b36" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1558.212861] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Acquired lock "refresh_cache-f082cdd7-228e-4100-b301-5af6daea9b36" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1558.213043] env[63379]: DEBUG nova.network.neutron [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1558.235152] env[63379]: DEBUG oslo_concurrency.lockutils [None req-40fa4bd1-9030-4ca4-a38b-7e9c5d277e4c tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "f10fe64d-a09e-488a-b609-3e38922cf2e0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.720s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1558.242476] env[63379]: DEBUG oslo_vmware.api [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779402, 'name': Rename_Task, 'duration_secs': 0.158122} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.242809] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1558.243093] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-71d6f706-7f9c-4e59-9fa7-779e5992e198 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.251579] env[63379]: DEBUG oslo_vmware.api [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1558.251579] env[63379]: value = "task-1779403" [ 1558.251579] env[63379]: _type = "Task" [ 1558.251579] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.261575] env[63379]: DEBUG oslo_vmware.api [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779403, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.342690] env[63379]: ERROR nova.scheduler.client.report [None req-de9a04ff-b298-46a2-a268-8f5ae7ba0235 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] [req-939e1a9c-50ad-420f-ac7f-f01e467652e4] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID cf478c89-515f-4372-b90f-4868ab56e978. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-939e1a9c-50ad-420f-ac7f-f01e467652e4"}]} [ 1558.363105] env[63379]: DEBUG nova.scheduler.client.report [None req-de9a04ff-b298-46a2-a268-8f5ae7ba0235 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Refreshing inventories for resource provider cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1558.377859] env[63379]: DEBUG nova.scheduler.client.report [None req-de9a04ff-b298-46a2-a268-8f5ae7ba0235 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Updating ProviderTree inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1558.378140] env[63379]: DEBUG nova.compute.provider_tree [None req-de9a04ff-b298-46a2-a268-8f5ae7ba0235 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1558.389559] env[63379]: DEBUG nova.scheduler.client.report [None req-de9a04ff-b298-46a2-a268-8f5ae7ba0235 tempest-MultipleCreateTestJSON-2001330541 
tempest-MultipleCreateTestJSON-2001330541-project-member] Refreshing aggregate associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, aggregates: None {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1558.410942] env[63379]: DEBUG nova.scheduler.client.report [None req-de9a04ff-b298-46a2-a268-8f5ae7ba0235 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Refreshing trait associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1558.472770] env[63379]: DEBUG oslo_vmware.api [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5207f6e2-f74c-dc2c-95c9-5eb358f3ff04, 'name': SearchDatastore_Task, 'duration_secs': 0.014632} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.475921] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1558.476654] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9/3b662a31-76b9-4ac8-a6bd-bc4983f7fec9.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1558.476814] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-983b66ec-3f52-49d4-aa9b-603f7769a412 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.486089] env[63379]: DEBUG oslo_vmware.api [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1558.486089] env[63379]: value = "task-1779404" [ 1558.486089] env[63379]: _type = "Task" [ 1558.486089] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.498128] env[63379]: DEBUG oslo_vmware.api [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779404, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.733698] env[63379]: DEBUG nova.compute.manager [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1558.739022] env[63379]: DEBUG nova.compute.manager [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1558.765774] env[63379]: DEBUG nova.network.neutron [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1558.771135] env[63379]: DEBUG nova.virt.hardware [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1558.771453] env[63379]: DEBUG nova.virt.hardware [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1558.771624] env[63379]: DEBUG nova.virt.hardware [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1558.771812] env[63379]: DEBUG nova.virt.hardware [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1558.772056] env[63379]: DEBUG nova.virt.hardware [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1558.772267] env[63379]: DEBUG nova.virt.hardware [None 
req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1558.772497] env[63379]: DEBUG nova.virt.hardware [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1558.772664] env[63379]: DEBUG nova.virt.hardware [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1558.772834] env[63379]: DEBUG nova.virt.hardware [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1558.773012] env[63379]: DEBUG nova.virt.hardware [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1558.773203] env[63379]: DEBUG nova.virt.hardware [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1558.774060] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06b3b5fc-4f52-4567-a91a-36cd30b9bf48 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.781158] env[63379]: DEBUG oslo_vmware.api [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779403, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.792671] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22cb47fd-ed65-448a-b1d1-9f8132e6b6d1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.938497] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa5d74af-467f-4c6d-9944-0256fef6ea5a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.942613] env[63379]: DEBUG nova.network.neutron [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Updating instance_info_cache with network_info: [{"id": "bbe843e8-9156-454e-8ba4-dae6bc31c8b2", "address": "fa:16:3e:0c:14:52", "network": {"id": "55f3848c-4d4f-4c83-a3e6-bc7a6f7af3ce", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.215", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eb95d75934bc4912a35f709406a98a65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbbe843e8-91", "ovs_interfaceid": "bbe843e8-9156-454e-8ba4-dae6bc31c8b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1558.953679] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b4d5765-3a58-4998-a4b2-75bc80ed1a6c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.996233] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c18db1db-88f7-4b1e-a539-662d2fef7a1f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.010367] env[63379]: DEBUG oslo_vmware.api [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779404, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.012149] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfb99978-fa95-4556-a65f-a216ab2847f6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.030366] env[63379]: DEBUG nova.compute.provider_tree [None req-de9a04ff-b298-46a2-a268-8f5ae7ba0235 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1559.174682] env[63379]: DEBUG nova.compute.manager [req-ebaf5177-e735-4ce1-9d5c-23a554079bbd req-51c63852-bb53-4524-80ca-8e35763f6ac0 service nova] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Received event network-vif-plugged-2ac41cb5-759a-42a6-a664-26ad0cc81d81 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1559.175140] env[63379]: DEBUG oslo_concurrency.lockutils [req-ebaf5177-e735-4ce1-9d5c-23a554079bbd req-51c63852-bb53-4524-80ca-8e35763f6ac0 service nova] Acquiring lock "1d76a28f-822d-4b4f-be2f-2ad3371b3979-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1559.175600] env[63379]: DEBUG oslo_concurrency.lockutils [req-ebaf5177-e735-4ce1-9d5c-23a554079bbd req-51c63852-bb53-4524-80ca-8e35763f6ac0 service nova] Lock "1d76a28f-822d-4b4f-be2f-2ad3371b3979-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1559.175853] env[63379]: DEBUG oslo_concurrency.lockutils [req-ebaf5177-e735-4ce1-9d5c-23a554079bbd req-51c63852-bb53-4524-80ca-8e35763f6ac0 service nova] Lock "1d76a28f-822d-4b4f-be2f-2ad3371b3979-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1559.176110] env[63379]: DEBUG nova.compute.manager [req-ebaf5177-e735-4ce1-9d5c-23a554079bbd req-51c63852-bb53-4524-80ca-8e35763f6ac0 service nova] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] No waiting events found dispatching network-vif-plugged-2ac41cb5-759a-42a6-a664-26ad0cc81d81 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1559.176477] env[63379]: WARNING nova.compute.manager [req-ebaf5177-e735-4ce1-9d5c-23a554079bbd req-51c63852-bb53-4524-80ca-8e35763f6ac0 service nova] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Received unexpected event network-vif-plugged-2ac41cb5-759a-42a6-a664-26ad0cc81d81 for instance with vm_state building and task_state spawning. 
[ 1559.248091] env[63379]: DEBUG nova.network.neutron [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Successfully updated port: 2ac41cb5-759a-42a6-a664-26ad0cc81d81 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1559.273054] env[63379]: DEBUG oslo_vmware.api [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779403, 'name': PowerOnVM_Task, 'duration_secs': 0.864949} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.273054] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1559.273054] env[63379]: INFO nova.compute.manager [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Took 8.66 seconds to spawn the instance on the hypervisor. [ 1559.273054] env[63379]: DEBUG nova.compute.manager [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1559.273834] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53b7fab0-23f3-44bf-9479-ce8d23e161a2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.278306] env[63379]: DEBUG oslo_concurrency.lockutils [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1559.450692] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Releasing lock "refresh_cache-f082cdd7-228e-4100-b301-5af6daea9b36" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1559.451046] env[63379]: DEBUG nova.compute.manager [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Instance network_info: |[{"id": "bbe843e8-9156-454e-8ba4-dae6bc31c8b2", "address": "fa:16:3e:0c:14:52", "network": {"id": "55f3848c-4d4f-4c83-a3e6-bc7a6f7af3ce", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.215", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eb95d75934bc4912a35f709406a98a65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbbe843e8-91", "ovs_interfaceid": "bbe843e8-9156-454e-8ba4-dae6bc31c8b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1559.451510] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0c:14:52', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea00b53a-9c9b-4592-ab95-7e10473f338d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bbe843e8-9156-454e-8ba4-dae6bc31c8b2', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1559.459158] env[63379]: DEBUG oslo.service.loopingcall [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1559.460027] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1559.460027] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e39f056a-b2db-4d37-a7f3-81f37356c3c0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.480200] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1559.480200] env[63379]: value = "task-1779405" [ 1559.480200] env[63379]: _type = "Task" [ 1559.480200] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.488660] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779405, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.501411] env[63379]: DEBUG oslo_vmware.api [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779404, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.713685} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.501674] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9/3b662a31-76b9-4ac8-a6bd-bc4983f7fec9.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1559.501887] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1559.502160] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-558989ed-9b8b-4839-88b5-39d353fbac55 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.512357] env[63379]: DEBUG oslo_vmware.api [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1559.512357] env[63379]: value = "task-1779406" [ 1559.512357] env[63379]: _type = "Task" [ 1559.512357] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.521645] env[63379]: DEBUG oslo_vmware.api [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779406, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.564668] env[63379]: DEBUG nova.scheduler.client.report [None req-de9a04ff-b298-46a2-a268-8f5ae7ba0235 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Updated inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 with generation 72 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1559.565247] env[63379]: DEBUG nova.compute.provider_tree [None req-de9a04ff-b298-46a2-a268-8f5ae7ba0235 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Updating resource provider cf478c89-515f-4372-b90f-4868ab56e978 generation from 72 to 73 during operation: update_inventory {{(pid=63379) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1559.565247] env[63379]: DEBUG nova.compute.provider_tree [None req-de9a04ff-b298-46a2-a268-8f5ae7ba0235 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1559.597842] env[63379]: DEBUG nova.compute.manager [None req-c2acd61a-6cc1-44ac-8f75-e5680994567e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1559.598825] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41a2925d-e964-4b87-bb90-34f1e4090918 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.761364] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Acquiring lock "refresh_cache-1d76a28f-822d-4b4f-be2f-2ad3371b3979" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1559.762048] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Acquired lock "refresh_cache-1d76a28f-822d-4b4f-be2f-2ad3371b3979" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1559.762048] env[63379]: DEBUG nova.network.neutron [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 
tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1559.794192] env[63379]: INFO nova.compute.manager [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Took 44.55 seconds to build instance. [ 1559.994514] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779405, 'name': CreateVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.024908] env[63379]: DEBUG oslo_vmware.api [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779406, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.11673} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.025348] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1560.026527] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6483d165-c60c-4e00-83ed-aa4c438376db {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.061477] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Reconfiguring VM instance instance-00000032 to attach disk [datastore1] 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9/3b662a31-76b9-4ac8-a6bd-bc4983f7fec9.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1560.061934] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-867b455d-348b-4561-a99c-6ce387cce47c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.085817] env[63379]: DEBUG oslo_concurrency.lockutils [None req-de9a04ff-b298-46a2-a268-8f5ae7ba0235 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.387s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1560.089179] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b54e620a-f62a-41d3-a01c-3a7327a2adaa tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.858s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1560.089669] env[63379]: DEBUG nova.objects.instance [None 
req-b54e620a-f62a-41d3-a01c-3a7327a2adaa tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Lazy-loading 'resources' on Instance uuid e838f54f-99f2-4f39-a9d2-725be8a5b3ce {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1560.099285] env[63379]: DEBUG oslo_vmware.api [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1560.099285] env[63379]: value = "task-1779407" [ 1560.099285] env[63379]: _type = "Task" [ 1560.099285] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.109722] env[63379]: DEBUG oslo_vmware.api [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779407, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.110774] env[63379]: INFO nova.scheduler.client.report [None req-de9a04ff-b298-46a2-a268-8f5ae7ba0235 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Deleted allocations for instance 2f98800d-800f-4ad7-bd65-f12879f02ce5 [ 1560.112423] env[63379]: INFO nova.compute.manager [None req-c2acd61a-6cc1-44ac-8f75-e5680994567e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] instance snapshotting [ 1560.117834] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c14c5a1-2077-42ec-a5b3-1a9bc00cb36d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.139822] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c7ccb29-c673-4339-9937-804c31b3429f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.296433] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f6c1e585-d91d-476e-be4f-48d8d80ce04e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Lock "1d2de9da-9dfe-42d2-b206-bb5139b1970b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.671s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1560.310916] env[63379]: DEBUG nova.network.neutron [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1560.492719] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779405, 'name': CreateVM_Task, 'duration_secs': 0.616917} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.492906] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1560.493722] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1560.493941] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1560.494333] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1560.494657] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa65108b-5ea3-4718-9250-0549fa3641ab {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.502593] env[63379]: DEBUG oslo_vmware.api [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Waiting for the task: (returnval){ [ 1560.502593] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]525544af-6ab2-feb9-f0f7-5254c6a09c91" [ 1560.502593] env[63379]: _type = "Task" [ 1560.502593] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.512246] env[63379]: DEBUG oslo_vmware.api [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]525544af-6ab2-feb9-f0f7-5254c6a09c91, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.562918] env[63379]: DEBUG nova.network.neutron [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Updating instance_info_cache with network_info: [{"id": "2ac41cb5-759a-42a6-a664-26ad0cc81d81", "address": "fa:16:3e:00:25:b1", "network": {"id": "a7b09ae6-790d-492f-a067-68a9ea22533a", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-776111847-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fceda42cf54845eab8068573e0f8eb26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ac41cb5-75", "ovs_interfaceid": "2ac41cb5-759a-42a6-a664-26ad0cc81d81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1560.610496] env[63379]: DEBUG oslo_vmware.api [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779407, 'name': ReconfigVM_Task, 'duration_secs': 0.405416} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.610845] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Reconfigured VM instance instance-00000032 to attach disk [datastore1] 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9/3b662a31-76b9-4ac8-a6bd-bc4983f7fec9.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1560.611564] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-06fb67c1-1fc3-4aea-91a8-807ac7ca27bf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.620071] env[63379]: DEBUG oslo_concurrency.lockutils [None req-de9a04ff-b298-46a2-a268-8f5ae7ba0235 tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Lock "2f98800d-800f-4ad7-bd65-f12879f02ce5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.622s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1560.623106] env[63379]: DEBUG oslo_vmware.api [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1560.623106] env[63379]: value = "task-1779408" [ 1560.623106] env[63379]: _type = "Task" [ 1560.623106] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.635294] env[63379]: DEBUG oslo_vmware.api [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779408, 'name': Rename_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.653583] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c2acd61a-6cc1-44ac-8f75-e5680994567e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Creating Snapshot of the VM instance {{(pid=63379) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1560.654383] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-0f3c8ed9-ec6c-4294-948a-bdabe4c3a487 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.662407] env[63379]: DEBUG oslo_vmware.api [None req-c2acd61a-6cc1-44ac-8f75-e5680994567e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1560.662407] env[63379]: value = "task-1779409" [ 1560.662407] env[63379]: _type = "Task" [ 1560.662407] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.674739] env[63379]: DEBUG oslo_vmware.api [None req-c2acd61a-6cc1-44ac-8f75-e5680994567e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779409, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.799306] env[63379]: DEBUG nova.compute.manager [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1561.018357] env[63379]: DEBUG oslo_vmware.api [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]525544af-6ab2-feb9-f0f7-5254c6a09c91, 'name': SearchDatastore_Task, 'duration_secs': 0.035617} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1561.018715] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1561.018997] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1561.019328] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1561.019620] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1561.019850] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1561.020240] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-267f96ba-0adf-45b9-847f-15552d688c34 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.034012] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 
1561.034344] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1561.035253] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4abdb9ce-0e26-4e81-8030-4614bf93a4b5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.045649] env[63379]: DEBUG oslo_vmware.api [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Waiting for the task: (returnval){ [ 1561.045649] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52407ccb-dc92-024a-1792-ebe8c7621023" [ 1561.045649] env[63379]: _type = "Task" [ 1561.045649] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1561.054985] env[63379]: DEBUG oslo_vmware.api [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52407ccb-dc92-024a-1792-ebe8c7621023, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.056932] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80ca7108-6f96-4de4-a768-eb7f01d54494 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.065360] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b85e560-acec-4468-beac-8060df727a30 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.068839] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Releasing lock "refresh_cache-1d76a28f-822d-4b4f-be2f-2ad3371b3979" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1561.069142] env[63379]: DEBUG nova.compute.manager [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Instance network_info: |[{"id": "2ac41cb5-759a-42a6-a664-26ad0cc81d81", "address": "fa:16:3e:00:25:b1", "network": {"id": "a7b09ae6-790d-492f-a067-68a9ea22533a", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-776111847-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fceda42cf54845eab8068573e0f8eb26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ac41cb5-75", "ovs_interfaceid": "2ac41cb5-759a-42a6-a664-26ad0cc81d81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1561.069528] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:00:25:b1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1f996252-e329-42bd-a897-446dfe2b81cd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2ac41cb5-759a-42a6-a664-26ad0cc81d81', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1561.078504] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Creating folder: Project (fceda42cf54845eab8068573e0f8eb26). Parent ref: group-v369214. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1561.079185] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b06c0566-ac59-43be-a0f2-194bc528a0bf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.110284] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f46535f-cde6-4c6b-b945-7d3c10fa55cd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.112974] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Created folder: Project (fceda42cf54845eab8068573e0f8eb26) in parent group-v369214. [ 1561.113226] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Creating folder: Instances. Parent ref: group-v369362. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1561.113455] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-deb107a4-f25d-4cc2-8e4c-927f3995fa30 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.120391] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91090158-771d-4cbc-bd34-e120ca66c882 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.128678] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Created folder: Instances in parent group-v369362. 
[ 1561.128924] env[63379]: DEBUG oslo.service.loopingcall [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1561.129540] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1561.129777] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ee81177c-0c05-42a5-9804-ca9cc8863918 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.154249] env[63379]: DEBUG oslo_vmware.api [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779408, 'name': Rename_Task, 'duration_secs': 0.418125} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1561.154776] env[63379]: DEBUG nova.compute.provider_tree [None req-b54e620a-f62a-41d3-a01c-3a7327a2adaa tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1561.156575] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1561.157057] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-56e23b94-3cb6-49b9-bd2c-7b0598af3af5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.162738] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1561.162738] env[63379]: value = "task-1779412" [ 1561.162738] env[63379]: _type = "Task" [ 1561.162738] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1561.170886] env[63379]: DEBUG oslo_vmware.api [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1561.170886] env[63379]: value = "task-1779413" [ 1561.170886] env[63379]: _type = "Task" [ 1561.170886] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1561.181316] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779412, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.181578] env[63379]: DEBUG oslo_vmware.api [None req-c2acd61a-6cc1-44ac-8f75-e5680994567e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779409, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.193015] env[63379]: DEBUG oslo_vmware.api [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779413, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.206384] env[63379]: DEBUG nova.compute.manager [req-04f0535a-fa72-4af5-be94-9833dddb1174 req-1a60f87a-81ad-4971-b5c8-50b5da85fbc4 service nova] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Received event network-changed-2ac41cb5-759a-42a6-a664-26ad0cc81d81 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1561.206578] env[63379]: DEBUG nova.compute.manager [req-04f0535a-fa72-4af5-be94-9833dddb1174 req-1a60f87a-81ad-4971-b5c8-50b5da85fbc4 service nova] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Refreshing instance network info cache due to event network-changed-2ac41cb5-759a-42a6-a664-26ad0cc81d81. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1561.206854] env[63379]: DEBUG oslo_concurrency.lockutils [req-04f0535a-fa72-4af5-be94-9833dddb1174 req-1a60f87a-81ad-4971-b5c8-50b5da85fbc4 service nova] Acquiring lock "refresh_cache-1d76a28f-822d-4b4f-be2f-2ad3371b3979" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1561.207081] env[63379]: DEBUG oslo_concurrency.lockutils [req-04f0535a-fa72-4af5-be94-9833dddb1174 req-1a60f87a-81ad-4971-b5c8-50b5da85fbc4 service nova] Acquired lock "refresh_cache-1d76a28f-822d-4b4f-be2f-2ad3371b3979" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1561.207245] env[63379]: DEBUG nova.network.neutron [req-04f0535a-fa72-4af5-be94-9833dddb1174 req-1a60f87a-81ad-4971-b5c8-50b5da85fbc4 service nova] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Refreshing network info cache for port 2ac41cb5-759a-42a6-a664-26ad0cc81d81 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1561.322692] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1561.557164] env[63379]: DEBUG oslo_vmware.api [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52407ccb-dc92-024a-1792-ebe8c7621023, 'name': SearchDatastore_Task, 'duration_secs': 0.020229} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1561.558102] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d680217-0aab-47e0-aad5-66d17a5a8c55 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.565044] env[63379]: DEBUG oslo_vmware.api [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Waiting for the task: (returnval){ [ 1561.565044] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d2a1fb-3e9b-c7a3-9f9a-0c7a8db8143e" [ 1561.565044] env[63379]: _type = "Task" [ 1561.565044] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1561.576103] env[63379]: DEBUG oslo_vmware.api [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d2a1fb-3e9b-c7a3-9f9a-0c7a8db8143e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.659315] env[63379]: DEBUG nova.scheduler.client.report [None req-b54e620a-f62a-41d3-a01c-3a7327a2adaa tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1561.682527] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779412, 'name': CreateVM_Task, 'duration_secs': 0.457444} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1561.686628] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1561.687419] env[63379]: DEBUG oslo_vmware.api [None req-c2acd61a-6cc1-44ac-8f75-e5680994567e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779409, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.688288] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1561.688501] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1561.688870] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1561.693048] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7483473d-ad24-4b4c-898b-080257d68a78 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.695883] env[63379]: DEBUG oslo_vmware.api [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779413, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.700611] env[63379]: DEBUG oslo_vmware.api [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Waiting for the task: (returnval){ [ 1561.700611] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]524a94b4-bd35-a241-8301-1aeba95b1563" [ 1561.700611] env[63379]: _type = "Task" [ 1561.700611] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1561.713992] env[63379]: DEBUG oslo_vmware.api [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]524a94b4-bd35-a241-8301-1aeba95b1563, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.069810] env[63379]: DEBUG nova.network.neutron [req-04f0535a-fa72-4af5-be94-9833dddb1174 req-1a60f87a-81ad-4971-b5c8-50b5da85fbc4 service nova] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Updated VIF entry in instance network info cache for port 2ac41cb5-759a-42a6-a664-26ad0cc81d81. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1562.070197] env[63379]: DEBUG nova.network.neutron [req-04f0535a-fa72-4af5-be94-9833dddb1174 req-1a60f87a-81ad-4971-b5c8-50b5da85fbc4 service nova] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Updating instance_info_cache with network_info: [{"id": "2ac41cb5-759a-42a6-a664-26ad0cc81d81", "address": "fa:16:3e:00:25:b1", "network": {"id": "a7b09ae6-790d-492f-a067-68a9ea22533a", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-776111847-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fceda42cf54845eab8068573e0f8eb26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ac41cb5-75", "ovs_interfaceid": "2ac41cb5-759a-42a6-a664-26ad0cc81d81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1562.077988] env[63379]: DEBUG oslo_vmware.api [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d2a1fb-3e9b-c7a3-9f9a-0c7a8db8143e, 'name': SearchDatastore_Task, 'duration_secs': 0.014211} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1562.078340] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1562.078606] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] f082cdd7-228e-4100-b301-5af6daea9b36/f082cdd7-228e-4100-b301-5af6daea9b36.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1562.079055] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-00bab7ad-3115-431b-9454-a8c84800ee5e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.087534] env[63379]: DEBUG oslo_vmware.api [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Waiting for the task: (returnval){ [ 1562.087534] env[63379]: value = "task-1779414" [ 1562.087534] env[63379]: _type = "Task" [ 1562.087534] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1562.097343] env[63379]: DEBUG oslo_vmware.api [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779414, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.176022] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b54e620a-f62a-41d3-a01c-3a7327a2adaa tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.087s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1562.178455] env[63379]: DEBUG oslo_vmware.api [None req-c2acd61a-6cc1-44ac-8f75-e5680994567e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779409, 'name': CreateSnapshot_Task, 'duration_secs': 1.264975} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1562.179593] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.319s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1562.180776] env[63379]: INFO nova.compute.claims [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1562.187792] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c2acd61a-6cc1-44ac-8f75-e5680994567e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Created Snapshot of the VM instance {{(pid=63379) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1562.188897] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c9b8d5e-2c1b-4b80-a0b3-75c61cc4c8bf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.203773] env[63379]: DEBUG oslo_vmware.api [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779413, 'name': PowerOnVM_Task, 'duration_secs': 0.866354} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1562.204857] env[63379]: INFO nova.scheduler.client.report [None req-b54e620a-f62a-41d3-a01c-3a7327a2adaa tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Deleted allocations for instance e838f54f-99f2-4f39-a9d2-725be8a5b3ce [ 1562.209627] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1562.209850] env[63379]: INFO nova.compute.manager [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Took 8.86 seconds to spawn the instance on the hypervisor. 
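The entries above show the recurring oslo.vmware pattern behind most of this trace: a vSphere *_Task method is invoked (CopyVirtualDisk_Task, CreateSnapshot_Task, PowerOnVM_Task), the session then blocks in wait_for_task(), and _poll_task emits the periodic "progress is N%" and "completed successfully" lines. Below is a minimal, illustrative sketch of that pattern; the vCenter endpoint, credentials and VM moref are hypothetical and do not come from this log.

# Illustrative sketch only, not Nova's code: the generic oslo.vmware
# "invoke a *_Task method, then poll it" pattern seen in the log above.
# The host, credentials and the VM moref below are hypothetical.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vc1.example.test',               # hypothetical vCenter endpoint
    'administrator@vsphere.local',    # hypothetical user
    'secret',                         # hypothetical password
    api_retry_count=10,
    task_poll_interval=0.5)           # source of the periodic "progress is N%" lines

# Build a managed object reference for a VM (morefs like "vm-369366" appear later in the log).
vm_ref = vim_util.get_moref('vm-369366', 'VirtualMachine')

# Start an asynchronous vSphere task...
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

# ...and wait for it; this is where wait_for_task/_poll_task log
# "Task: {'id': task-..., 'name': PowerOnVM_Task} progress is N%"
# and finally "completed successfully".
session.wait_for_task(task)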
[ 1562.210120] env[63379]: DEBUG nova.compute.manager [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1562.213222] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b96e226-03f8-472b-bd25-f6fb21a885c0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.228359] env[63379]: DEBUG oslo_vmware.api [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]524a94b4-bd35-a241-8301-1aeba95b1563, 'name': SearchDatastore_Task, 'duration_secs': 0.023698} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1562.228917] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1562.229186] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1562.229435] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1562.229588] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1562.229769] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1562.230109] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0d097b5d-7624-44b7-9412-98f556783c9f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.251104] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 
tempest-AttachVolumeTestJSON-841781366-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1562.251830] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1562.252546] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-78b5aff6-1789-4969-876e-4b1536cde491 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.260132] env[63379]: DEBUG oslo_vmware.api [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Waiting for the task: (returnval){ [ 1562.260132] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52aea655-6da4-63e2-49f0-0d46c346ba81" [ 1562.260132] env[63379]: _type = "Task" [ 1562.260132] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1562.272329] env[63379]: DEBUG oslo_vmware.api [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52aea655-6da4-63e2-49f0-0d46c346ba81, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.425138] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d883632d-d439-4c3a-ba74-f30602259671 tempest-VolumesAssistedSnapshotsTest-711968808 tempest-VolumesAssistedSnapshotsTest-711968808-project-admin] Acquiring lock "d47be684-6cd8-45c6-8c6a-9a6db0390f97" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1562.425466] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d883632d-d439-4c3a-ba74-f30602259671 tempest-VolumesAssistedSnapshotsTest-711968808 tempest-VolumesAssistedSnapshotsTest-711968808-project-admin] Lock "d47be684-6cd8-45c6-8c6a-9a6db0390f97" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1562.574134] env[63379]: DEBUG oslo_concurrency.lockutils [req-04f0535a-fa72-4af5-be94-9833dddb1174 req-1a60f87a-81ad-4971-b5c8-50b5da85fbc4 service nova] Releasing lock "refresh_cache-1d76a28f-822d-4b4f-be2f-2ad3371b3979" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1562.602559] env[63379]: DEBUG oslo_vmware.api [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779414, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.716482] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c2acd61a-6cc1-44ac-8f75-e5680994567e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Creating linked-clone VM from snapshot {{(pid=63379) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1562.720091] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-c20fa560-59c0-426d-a371-d4cc09042f26 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.723698] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b54e620a-f62a-41d3-a01c-3a7327a2adaa tempest-MultipleCreateTestJSON-2001330541 tempest-MultipleCreateTestJSON-2001330541-project-member] Lock "e838f54f-99f2-4f39-a9d2-725be8a5b3ce" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.532s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1562.738140] env[63379]: DEBUG oslo_vmware.api [None req-c2acd61a-6cc1-44ac-8f75-e5680994567e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1562.738140] env[63379]: value = "task-1779415" [ 1562.738140] env[63379]: _type = "Task" [ 1562.738140] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1562.745081] env[63379]: INFO nova.compute.manager [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Took 44.32 seconds to build instance. [ 1562.757123] env[63379]: DEBUG oslo_vmware.api [None req-c2acd61a-6cc1-44ac-8f75-e5680994567e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779415, 'name': CloneVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.775081] env[63379]: DEBUG oslo_vmware.api [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52aea655-6da4-63e2-49f0-0d46c346ba81, 'name': SearchDatastore_Task, 'duration_secs': 0.016408} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1562.775657] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21a06621-c131-45a5-b5ae-418c3dc17dfa {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.788309] env[63379]: DEBUG oslo_vmware.api [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Waiting for the task: (returnval){ [ 1562.788309] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52243b33-3e99-66ff-888b-a4940025521c" [ 1562.788309] env[63379]: _type = "Task" [ 1562.788309] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1562.799867] env[63379]: DEBUG oslo_vmware.api [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52243b33-3e99-66ff-888b-a4940025521c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.928613] env[63379]: DEBUG nova.compute.utils [None req-d883632d-d439-4c3a-ba74-f30602259671 tempest-VolumesAssistedSnapshotsTest-711968808 tempest-VolumesAssistedSnapshotsTest-711968808-project-admin] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1563.100563] env[63379]: DEBUG oslo_vmware.api [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779414, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.708587} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1563.101073] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] f082cdd7-228e-4100-b301-5af6daea9b36/f082cdd7-228e-4100-b301-5af6daea9b36.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1563.101380] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1563.101731] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-13cff450-6a2a-40f7-ae3f-4d3699b4bb47 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.113032] env[63379]: DEBUG oslo_vmware.api [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Waiting for the task: (returnval){ [ 1563.113032] env[63379]: value = "task-1779416" [ 1563.113032] env[63379]: _type = "Task" [ 1563.113032] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1563.124116] env[63379]: DEBUG oslo_vmware.api [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779416, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.248374] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e75c63f0-7062-4c9e-b930-5a28d253389e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Lock "3b662a31-76b9-4ac8-a6bd-bc4983f7fec9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 73.516s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1563.260353] env[63379]: DEBUG oslo_vmware.api [None req-c2acd61a-6cc1-44ac-8f75-e5680994567e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779415, 'name': CloneVM_Task} progress is 94%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.306902] env[63379]: DEBUG oslo_vmware.api [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52243b33-3e99-66ff-888b-a4940025521c, 'name': SearchDatastore_Task, 'duration_secs': 0.019728} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1563.308064] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1563.308064] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 1d76a28f-822d-4b4f-be2f-2ad3371b3979/1d76a28f-822d-4b4f-be2f-2ad3371b3979.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1563.308064] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a554eff7-1226-4068-8c73-b477c7000706 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.320962] env[63379]: DEBUG oslo_vmware.api [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Waiting for the task: (returnval){ [ 1563.320962] env[63379]: value = "task-1779417" [ 1563.320962] env[63379]: _type = "Task" [ 1563.320962] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1563.334716] env[63379]: DEBUG oslo_vmware.api [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1779417, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.417695] env[63379]: INFO nova.compute.manager [None req-e53e0306-68e1-499c-8dc1-07b4bb8ee7e7 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Rescuing [ 1563.417996] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e53e0306-68e1-499c-8dc1-07b4bb8ee7e7 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquiring lock "refresh_cache-3b662a31-76b9-4ac8-a6bd-bc4983f7fec9" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1563.418179] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e53e0306-68e1-499c-8dc1-07b4bb8ee7e7 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquired lock "refresh_cache-3b662a31-76b9-4ac8-a6bd-bc4983f7fec9" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1563.418355] env[63379]: DEBUG nova.network.neutron [None req-e53e0306-68e1-499c-8dc1-07b4bb8ee7e7 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1563.431152] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d883632d-d439-4c3a-ba74-f30602259671 tempest-VolumesAssistedSnapshotsTest-711968808 tempest-VolumesAssistedSnapshotsTest-711968808-project-admin] Lock "d47be684-6cd8-45c6-8c6a-9a6db0390f97" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1563.627448] env[63379]: DEBUG oslo_vmware.api [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779416, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079231} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1563.627964] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1563.629195] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-940818de-6c57-49a1-8c2a-13c1b4a3f5d8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.660996] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] f082cdd7-228e-4100-b301-5af6daea9b36/f082cdd7-228e-4100-b301-5af6daea9b36.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1563.664517] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bf4eafb6-e680-40bc-bf22-2ddbe8d88f68 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.688241] env[63379]: DEBUG oslo_vmware.api [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Waiting for the task: (returnval){ [ 1563.688241] env[63379]: value = "task-1779418" [ 1563.688241] env[63379]: _type = "Task" [ 1563.688241] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1563.702335] env[63379]: DEBUG oslo_vmware.api [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779418, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.747256] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6649578f-c829-429d-a4e1-064eac698ce1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.760053] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a10028cb-5f0f-4999-8cad-81c124c627d5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.763871] env[63379]: DEBUG oslo_vmware.api [None req-c2acd61a-6cc1-44ac-8f75-e5680994567e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779415, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.794909] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31219d41-0546-469b-9267-4fa49dbf0753 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.804826] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80b89363-f110-419a-aaed-011069f69d63 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.826090] env[63379]: DEBUG nova.compute.provider_tree [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1563.840184] env[63379]: DEBUG oslo_vmware.api [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1779417, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.206394] env[63379]: DEBUG oslo_vmware.api [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779418, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.255887] env[63379]: DEBUG oslo_vmware.api [None req-c2acd61a-6cc1-44ac-8f75-e5680994567e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779415, 'name': CloneVM_Task} progress is 94%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.333115] env[63379]: DEBUG nova.scheduler.client.report [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1564.346781] env[63379]: DEBUG oslo_vmware.api [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1779417, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.78224} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1564.347330] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 1d76a28f-822d-4b4f-be2f-2ad3371b3979/1d76a28f-822d-4b4f-be2f-2ad3371b3979.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1564.347588] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1564.347784] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-45dbbf42-70f5-475b-9b57-37e14d36bf6c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.357547] env[63379]: DEBUG nova.network.neutron [None req-e53e0306-68e1-499c-8dc1-07b4bb8ee7e7 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Updating instance_info_cache with network_info: [{"id": "3d75d6b5-820e-43f4-b349-f7d9d2137fee", "address": "fa:16:3e:00:b3:c0", "network": {"id": "3a5c4f8e-5c7c-4623-90f8-f1b83e5b35f8", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-709139332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce15a519ec5744feb0731439b2534fc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d75d6b5-82", "ovs_interfaceid": "3d75d6b5-820e-43f4-b349-f7d9d2137fee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1564.362197] env[63379]: DEBUG oslo_vmware.api [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Waiting for the task: (returnval){ [ 1564.362197] env[63379]: value = "task-1779419" [ 1564.362197] env[63379]: _type = "Task" [ 1564.362197] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.375305] env[63379]: DEBUG oslo_vmware.api [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1779419, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.550166] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d883632d-d439-4c3a-ba74-f30602259671 tempest-VolumesAssistedSnapshotsTest-711968808 tempest-VolumesAssistedSnapshotsTest-711968808-project-admin] Acquiring lock "d47be684-6cd8-45c6-8c6a-9a6db0390f97" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1564.550530] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d883632d-d439-4c3a-ba74-f30602259671 tempest-VolumesAssistedSnapshotsTest-711968808 tempest-VolumesAssistedSnapshotsTest-711968808-project-admin] Lock "d47be684-6cd8-45c6-8c6a-9a6db0390f97" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1564.550867] env[63379]: INFO nova.compute.manager [None req-d883632d-d439-4c3a-ba74-f30602259671 tempest-VolumesAssistedSnapshotsTest-711968808 tempest-VolumesAssistedSnapshotsTest-711968808-project-admin] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Attaching volume 8de26acd-1826-4df7-ab2d-e7e3a910af4c to /dev/sdb [ 1564.602158] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7df8cf4-7e18-4754-b8db-fba1019ff3bd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.612231] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-197bd065-41ca-486c-a86e-1f821678344a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.629078] env[63379]: DEBUG nova.virt.block_device [None req-d883632d-d439-4c3a-ba74-f30602259671 tempest-VolumesAssistedSnapshotsTest-711968808 tempest-VolumesAssistedSnapshotsTest-711968808-project-admin] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Updating existing volume attachment record: 80087d8f-3656-4ff5-8a61-2d536e1ac52b {{(pid=63379) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1564.705852] env[63379]: DEBUG oslo_vmware.api [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779418, 'name': ReconfigVM_Task, 'duration_secs': 1.018159} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1564.706253] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Reconfigured VM instance instance-00000033 to attach disk [datastore1] f082cdd7-228e-4100-b301-5af6daea9b36/f082cdd7-228e-4100-b301-5af6daea9b36.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1564.707035] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5c1e238f-db02-4508-aa6c-91a8ba3a22d6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.716806] env[63379]: DEBUG oslo_vmware.api [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Waiting for the task: (returnval){ [ 1564.716806] env[63379]: value = "task-1779420" [ 1564.716806] env[63379]: _type = "Task" [ 1564.716806] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.742247] env[63379]: DEBUG oslo_vmware.api [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779420, 'name': Rename_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.754338] env[63379]: DEBUG oslo_vmware.api [None req-c2acd61a-6cc1-44ac-8f75-e5680994567e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779415, 'name': CloneVM_Task} progress is 95%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.821494] env[63379]: DEBUG oslo_vmware.rw_handles [None req-aa947b5a-81ea-4a4c-92cd-ea50bfb162ea tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521b794a-29d1-5841-b2e6-00db007f2e9b/disk-0.vmdk. {{(pid=63379) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1564.821974] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c65bf37-80d8-4683-8dfa-bf9a12cc5626 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.832652] env[63379]: DEBUG oslo_vmware.rw_handles [None req-aa947b5a-81ea-4a4c-92cd-ea50bfb162ea tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521b794a-29d1-5841-b2e6-00db007f2e9b/disk-0.vmdk is in state: ready. {{(pid=63379) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1564.832652] env[63379]: ERROR oslo_vmware.rw_handles [None req-aa947b5a-81ea-4a4c-92cd-ea50bfb162ea tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521b794a-29d1-5841-b2e6-00db007f2e9b/disk-0.vmdk due to incomplete transfer. 
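The "Acquiring lock", "acquired ... waited Ns" and ""released" ... held Ns" lines throughout this trace (including the "compute_resources" and per-image datastore cache locks in the entries that follow) come from oslo.concurrency's lockutils. A minimal sketch of the two usual forms, with hypothetical lock names and function bodies, is shown below.

# Illustrative sketch only, not Nova's code: the oslo.concurrency locking
# pattern behind the "Acquiring lock" / "acquired ... waited Ns" /
# '"released" ... held Ns' log lines. Lock names and bodies are hypothetical.
from oslo_concurrency import lockutils

# Decorator form: serialize callers on a named semaphore, as the resource
# tracker does for "compute_resources".
@lockutils.synchronized('compute_resources')
def update_usage():
    ...  # critical section

# Context-manager form, as used for per-image cache paths such as
# "[datastore1] devstack-image-cache_base/<image-id>.vmdk".
def fetch_image_if_missing(path):
    with lockutils.lock(path):
        ...  # only one worker touches the cached VMDK at a time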
[ 1564.832652] env[63379]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-aac0eb21-a134-48bb-a2ce-3bc8d5e56726 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.838165] env[63379]: DEBUG oslo_vmware.rw_handles [None req-aa947b5a-81ea-4a4c-92cd-ea50bfb162ea tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521b794a-29d1-5841-b2e6-00db007f2e9b/disk-0.vmdk. {{(pid=63379) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1564.838389] env[63379]: DEBUG nova.virt.vmwareapi.images [None req-aa947b5a-81ea-4a4c-92cd-ea50bfb162ea tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Uploaded image 806d4f4b-63da-4ae9-9dc9-7f928bfa54fa to the Glance image server {{(pid=63379) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1564.840546] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa947b5a-81ea-4a4c-92cd-ea50bfb162ea tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Destroying the VM {{(pid=63379) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1564.841439] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.662s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1564.841993] env[63379]: DEBUG nova.compute.manager [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1564.844866] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-85b8b2e4-221b-46fe-9b83-3c01fb20f218 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.847704] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.437s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1564.849994] env[63379]: INFO nova.compute.claims [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1564.858892] env[63379]: DEBUG oslo_vmware.api [None req-aa947b5a-81ea-4a4c-92cd-ea50bfb162ea tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Waiting for the task: (returnval){ [ 1564.858892] env[63379]: value = "task-1779424" [ 1564.858892] env[63379]: _type = "Task" [ 1564.858892] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.864661] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e53e0306-68e1-499c-8dc1-07b4bb8ee7e7 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Releasing lock "refresh_cache-3b662a31-76b9-4ac8-a6bd-bc4983f7fec9" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1564.878480] env[63379]: DEBUG oslo_vmware.api [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1779419, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079902} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1564.882506] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1564.882873] env[63379]: DEBUG oslo_vmware.api [None req-aa947b5a-81ea-4a4c-92cd-ea50bfb162ea tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779424, 'name': Destroy_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.883865] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2a24f96-7ed6-4430-a3fc-a95fd185d878 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.908756] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Reconfiguring VM instance instance-00000034 to attach disk [datastore1] 1d76a28f-822d-4b4f-be2f-2ad3371b3979/1d76a28f-822d-4b4f-be2f-2ad3371b3979.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1564.912670] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-153e73d3-8c6a-4f75-bcb2-90fb651c0b44 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.943371] env[63379]: DEBUG oslo_vmware.api [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Waiting for the task: (returnval){ [ 1564.943371] env[63379]: value = "task-1779425" [ 1564.943371] env[63379]: _type = "Task" [ 1564.943371] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.943371] env[63379]: DEBUG oslo_concurrency.lockutils [None req-50799629-93a0-435f-bfaa-33fa45c0f1d3 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Acquiring lock "d2f5b406-3d0e-4150-aeaf-7cdacbc12c06" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1564.943371] env[63379]: DEBUG oslo_concurrency.lockutils [None req-50799629-93a0-435f-bfaa-33fa45c0f1d3 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Lock "d2f5b406-3d0e-4150-aeaf-7cdacbc12c06" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1564.943371] env[63379]: DEBUG oslo_concurrency.lockutils [None req-50799629-93a0-435f-bfaa-33fa45c0f1d3 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Acquiring lock "d2f5b406-3d0e-4150-aeaf-7cdacbc12c06-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1564.943371] env[63379]: DEBUG oslo_concurrency.lockutils [None req-50799629-93a0-435f-bfaa-33fa45c0f1d3 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Lock "d2f5b406-3d0e-4150-aeaf-7cdacbc12c06-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1564.943371] env[63379]: DEBUG oslo_concurrency.lockutils [None 
req-50799629-93a0-435f-bfaa-33fa45c0f1d3 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Lock "d2f5b406-3d0e-4150-aeaf-7cdacbc12c06-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1564.949468] env[63379]: INFO nova.compute.manager [None req-50799629-93a0-435f-bfaa-33fa45c0f1d3 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Terminating instance [ 1564.951697] env[63379]: DEBUG nova.compute.manager [None req-50799629-93a0-435f-bfaa-33fa45c0f1d3 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1564.951920] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-50799629-93a0-435f-bfaa-33fa45c0f1d3 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1564.953272] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12f519fe-a83e-4baf-89f0-60e7fc541a4e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.968595] env[63379]: DEBUG oslo_vmware.api [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1779425, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.968936] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-50799629-93a0-435f-bfaa-33fa45c0f1d3 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1564.969199] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8e9cc521-9afd-43e3-b6a1-b461617238dc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.977419] env[63379]: DEBUG oslo_vmware.api [None req-50799629-93a0-435f-bfaa-33fa45c0f1d3 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Waiting for the task: (returnval){ [ 1564.977419] env[63379]: value = "task-1779426" [ 1564.977419] env[63379]: _type = "Task" [ 1564.977419] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.987151] env[63379]: DEBUG oslo_vmware.api [None req-50799629-93a0-435f-bfaa-33fa45c0f1d3 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Task: {'id': task-1779426, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.228996] env[63379]: DEBUG oslo_vmware.api [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779420, 'name': Rename_Task, 'duration_secs': 0.252387} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1565.229404] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1565.229718] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0a091de3-4103-4cab-83de-fef105fef753 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.237358] env[63379]: DEBUG oslo_vmware.api [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Waiting for the task: (returnval){ [ 1565.237358] env[63379]: value = "task-1779427" [ 1565.237358] env[63379]: _type = "Task" [ 1565.237358] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1565.246583] env[63379]: DEBUG oslo_vmware.api [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779427, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.256082] env[63379]: DEBUG oslo_vmware.api [None req-c2acd61a-6cc1-44ac-8f75-e5680994567e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779415, 'name': CloneVM_Task, 'duration_secs': 2.396615} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1565.256481] env[63379]: INFO nova.virt.vmwareapi.vmops [None req-c2acd61a-6cc1-44ac-8f75-e5680994567e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Created linked-clone VM from snapshot [ 1565.257343] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95ca5050-3f58-4817-a227-ddcc62586000 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.269455] env[63379]: DEBUG nova.virt.vmwareapi.images [None req-c2acd61a-6cc1-44ac-8f75-e5680994567e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Uploading image c9c544a4-5a35-4c31-896a-05c58c561419 {{(pid=63379) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1565.307140] env[63379]: DEBUG oslo_vmware.rw_handles [None req-c2acd61a-6cc1-44ac-8f75-e5680994567e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1565.307140] env[63379]: value = "vm-369366" [ 1565.307140] env[63379]: _type = "VirtualMachine" [ 1565.307140] env[63379]: }. {{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1565.307140] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-e574070a-3cfe-4557-8136-faf09d58c83e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.315282] env[63379]: DEBUG oslo_vmware.rw_handles [None req-c2acd61a-6cc1-44ac-8f75-e5680994567e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lease: (returnval){ [ 1565.315282] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52da031d-f200-b769-442d-0fd9063f8bf2" [ 1565.315282] env[63379]: _type = "HttpNfcLease" [ 1565.315282] env[63379]: } obtained for exporting VM: (result){ [ 1565.315282] env[63379]: value = "vm-369366" [ 1565.315282] env[63379]: _type = "VirtualMachine" [ 1565.315282] env[63379]: }. {{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1565.315567] env[63379]: DEBUG oslo_vmware.api [None req-c2acd61a-6cc1-44ac-8f75-e5680994567e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the lease: (returnval){ [ 1565.315567] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52da031d-f200-b769-442d-0fd9063f8bf2" [ 1565.315567] env[63379]: _type = "HttpNfcLease" [ 1565.315567] env[63379]: } to be ready. {{(pid=63379) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1565.324160] env[63379]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1565.324160] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52da031d-f200-b769-442d-0fd9063f8bf2" [ 1565.324160] env[63379]: _type = "HttpNfcLease" [ 1565.324160] env[63379]: } is initializing. 
{{(pid=63379) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1565.349263] env[63379]: DEBUG nova.compute.utils [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1565.350799] env[63379]: DEBUG nova.compute.manager [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1565.351012] env[63379]: DEBUG nova.network.neutron [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1565.372532] env[63379]: DEBUG oslo_vmware.api [None req-aa947b5a-81ea-4a4c-92cd-ea50bfb162ea tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779424, 'name': Destroy_Task} progress is 33%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.402142] env[63379]: DEBUG nova.policy [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b70275b98f8b4569a93d289fbd25901a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '696eed8e898e4ffd831805df17a93d27', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1565.456221] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e53e0306-68e1-499c-8dc1-07b4bb8ee7e7 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1565.456738] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-395ac948-1ce4-4944-ad21-94378ca3f94f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.465184] env[63379]: DEBUG oslo_vmware.api [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1779425, 'name': ReconfigVM_Task, 'duration_secs': 0.379151} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1565.466660] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Reconfigured VM instance instance-00000034 to attach disk [datastore1] 1d76a28f-822d-4b4f-be2f-2ad3371b3979/1d76a28f-822d-4b4f-be2f-2ad3371b3979.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1565.468271] env[63379]: DEBUG oslo_vmware.api [None req-e53e0306-68e1-499c-8dc1-07b4bb8ee7e7 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1565.468271] env[63379]: value = "task-1779429" [ 1565.468271] env[63379]: _type = "Task" [ 1565.468271] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1565.468489] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2d9a78d7-9d19-49d5-825a-3cb904e645ff {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.480115] env[63379]: DEBUG oslo_vmware.api [None req-e53e0306-68e1-499c-8dc1-07b4bb8ee7e7 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779429, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.484954] env[63379]: DEBUG oslo_vmware.api [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Waiting for the task: (returnval){ [ 1565.484954] env[63379]: value = "task-1779430" [ 1565.484954] env[63379]: _type = "Task" [ 1565.484954] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1565.492709] env[63379]: DEBUG oslo_vmware.api [None req-50799629-93a0-435f-bfaa-33fa45c0f1d3 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Task: {'id': task-1779426, 'name': PowerOffVM_Task, 'duration_secs': 0.2604} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1565.493510] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-50799629-93a0-435f-bfaa-33fa45c0f1d3 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1565.493614] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-50799629-93a0-435f-bfaa-33fa45c0f1d3 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1565.493816] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1d8e7e29-d139-4510-950d-c084ecce287c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.499273] env[63379]: DEBUG oslo_vmware.api [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1779430, 'name': Rename_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.605391] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-50799629-93a0-435f-bfaa-33fa45c0f1d3 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1565.605391] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-50799629-93a0-435f-bfaa-33fa45c0f1d3 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1565.605391] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-50799629-93a0-435f-bfaa-33fa45c0f1d3 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Deleting the datastore file [datastore1] d2f5b406-3d0e-4150-aeaf-7cdacbc12c06 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1565.605391] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3cf27ea5-8c80-42f2-b1c9-2edab6521e26 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.614015] env[63379]: DEBUG oslo_vmware.api [None req-50799629-93a0-435f-bfaa-33fa45c0f1d3 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Waiting for the task: (returnval){ [ 1565.614015] env[63379]: value = "task-1779432" [ 1565.614015] env[63379]: _type = "Task" [ 1565.614015] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1565.624251] env[63379]: DEBUG oslo_vmware.api [None req-50799629-93a0-435f-bfaa-33fa45c0f1d3 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Task: {'id': task-1779432, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.729125] env[63379]: DEBUG nova.network.neutron [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Successfully created port: 3f2cd71e-08fb-4de9-9736-18ae2bbad0eb {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1565.748672] env[63379]: DEBUG oslo_vmware.api [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779427, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.830697] env[63379]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1565.830697] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52da031d-f200-b769-442d-0fd9063f8bf2" [ 1565.830697] env[63379]: _type = "HttpNfcLease" [ 1565.830697] env[63379]: } is ready. {{(pid=63379) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1565.831915] env[63379]: DEBUG oslo_vmware.rw_handles [None req-c2acd61a-6cc1-44ac-8f75-e5680994567e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1565.831915] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52da031d-f200-b769-442d-0fd9063f8bf2" [ 1565.831915] env[63379]: _type = "HttpNfcLease" [ 1565.831915] env[63379]: }. {{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1565.834116] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cda2b00-1dbb-4625-b36c-8dc8e13b1dc1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.851989] env[63379]: DEBUG oslo_vmware.rw_handles [None req-c2acd61a-6cc1-44ac-8f75-e5680994567e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526227a5-1462-4857-0593-f1d04b581ad6/disk-0.vmdk from lease info. {{(pid=63379) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1565.852247] env[63379]: DEBUG oslo_vmware.rw_handles [None req-c2acd61a-6cc1-44ac-8f75-e5680994567e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526227a5-1462-4857-0593-f1d04b581ad6/disk-0.vmdk for reading. {{(pid=63379) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1565.855988] env[63379]: DEBUG nova.compute.manager [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Start building block device mappings for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1565.943333] env[63379]: DEBUG oslo_vmware.api [None req-aa947b5a-81ea-4a4c-92cd-ea50bfb162ea tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779424, 'name': Destroy_Task, 'duration_secs': 0.618949} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1565.943644] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-aa947b5a-81ea-4a4c-92cd-ea50bfb162ea tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Destroyed the VM [ 1565.943895] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-aa947b5a-81ea-4a4c-92cd-ea50bfb162ea tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Deleting Snapshot of the VM instance {{(pid=63379) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1565.945545] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-1e580e53-4d52-42e5-a139-5fffa10614dd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.952968] env[63379]: DEBUG oslo_vmware.api [None req-aa947b5a-81ea-4a4c-92cd-ea50bfb162ea tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Waiting for the task: (returnval){ [ 1565.952968] env[63379]: value = "task-1779433" [ 1565.952968] env[63379]: _type = "Task" [ 1565.952968] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1565.966365] env[63379]: DEBUG oslo_vmware.api [None req-aa947b5a-81ea-4a4c-92cd-ea50bfb162ea tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779433, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.982168] env[63379]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-a2c43a12-1c2b-451e-a514-eef2ffedf4ce {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.986715] env[63379]: DEBUG oslo_vmware.api [None req-e53e0306-68e1-499c-8dc1-07b4bb8ee7e7 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779429, 'name': PowerOffVM_Task, 'duration_secs': 0.326672} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1565.990473] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e53e0306-68e1-499c-8dc1-07b4bb8ee7e7 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1565.992288] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01558394-184d-4b29-a8bf-1e01f96195fe {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.004981] env[63379]: DEBUG oslo_vmware.api [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1779430, 'name': Rename_Task, 'duration_secs': 0.182497} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1566.021176] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1566.025771] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7ad20953-e9a5-4f48-8bab-a9b854046bf6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.028030] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f47d562a-8d5e-4804-a22f-7a7a2b79c06c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.039615] env[63379]: DEBUG oslo_vmware.api [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Waiting for the task: (returnval){ [ 1566.039615] env[63379]: value = "task-1779434" [ 1566.039615] env[63379]: _type = "Task" [ 1566.039615] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1566.057461] env[63379]: DEBUG oslo_vmware.api [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1779434, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.079150] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e53e0306-68e1-499c-8dc1-07b4bb8ee7e7 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1566.079505] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4ea57a60-b4f1-4a06-9082-97fcb58eca1e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.091038] env[63379]: DEBUG oslo_vmware.api [None req-e53e0306-68e1-499c-8dc1-07b4bb8ee7e7 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1566.091038] env[63379]: value = "task-1779435" [ 1566.091038] env[63379]: _type = "Task" [ 1566.091038] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1566.101746] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e53e0306-68e1-499c-8dc1-07b4bb8ee7e7 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] VM already powered off {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1566.102111] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e53e0306-68e1-499c-8dc1-07b4bb8ee7e7 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1566.102423] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e53e0306-68e1-499c-8dc1-07b4bb8ee7e7 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1566.102615] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e53e0306-68e1-499c-8dc1-07b4bb8ee7e7 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1566.102870] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-e53e0306-68e1-499c-8dc1-07b4bb8ee7e7 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1566.107503] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fab122ee-6eb8-4894-b662-9de744b64769 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.124854] 
env[63379]: DEBUG oslo_vmware.api [None req-50799629-93a0-435f-bfaa-33fa45c0f1d3 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Task: {'id': task-1779432, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.35949} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1566.129411] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-50799629-93a0-435f-bfaa-33fa45c0f1d3 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1566.129644] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-50799629-93a0-435f-bfaa-33fa45c0f1d3 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1566.129856] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-50799629-93a0-435f-bfaa-33fa45c0f1d3 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1566.130054] env[63379]: INFO nova.compute.manager [None req-50799629-93a0-435f-bfaa-33fa45c0f1d3 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1566.130316] env[63379]: DEBUG oslo.service.loopingcall [None req-50799629-93a0-435f-bfaa-33fa45c0f1d3 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1566.130552] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-e53e0306-68e1-499c-8dc1-07b4bb8ee7e7 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1566.130710] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e53e0306-68e1-499c-8dc1-07b4bb8ee7e7 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1566.131691] env[63379]: DEBUG nova.compute.manager [-] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1566.131793] env[63379]: DEBUG nova.network.neutron [-] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1566.133425] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-468b9bc3-f46d-4e3f-8b77-0b3d24068b16 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.141942] env[63379]: DEBUG oslo_vmware.api [None req-e53e0306-68e1-499c-8dc1-07b4bb8ee7e7 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1566.141942] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c08c41-24af-a0b6-bb43-fa42e1ff8027" [ 1566.141942] env[63379]: _type = "Task" [ 1566.141942] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1566.161497] env[63379]: DEBUG oslo_vmware.api [None req-e53e0306-68e1-499c-8dc1-07b4bb8ee7e7 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c08c41-24af-a0b6-bb43-fa42e1ff8027, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.250791] env[63379]: DEBUG oslo_vmware.api [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779427, 'name': PowerOnVM_Task, 'duration_secs': 0.840133} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1566.254283] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1566.254614] env[63379]: INFO nova.compute.manager [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Took 10.20 seconds to spawn the instance on the hypervisor. 
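The records above all follow the same pattern: a vSphere task (Destroy_Task, RemoveSnapshot_Task, Rename_Task, PowerOnVM_Task, DeleteDatastoreFile_Task, ...) is kicked off through oslo.vmware, and wait_for_task/_poll_task then polls it, logging "progress is N%" until the task reports "completed successfully". The snippet below is a minimal, self-contained sketch of such a polling loop; the TaskInfo type and the poll() callable are hypothetical stand-ins for the real vSphere task object and are not oslo.vmware's actual API.

    # Minimal sketch of a "wait for task" polling loop, in the spirit of the
    # oslo_vmware.api.wait_for_task/_poll_task records above. TaskInfo and the
    # poll() callable are hypothetical stand-ins for the real vSphere task
    # object -- this is not oslo.vmware's API.
    import time
    from dataclasses import dataclass
    from typing import Callable

    @dataclass
    class TaskInfo:
        state: str          # 'running', 'success' or 'error'
        progress: int = 0   # percent complete
        error: str = ""     # error message when state == 'error'

    def wait_for_task(poll: Callable[[], TaskInfo], interval: float = 0.5,
                      timeout: float = 300.0) -> TaskInfo:
        """Poll a task until it succeeds, fails or times out."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = poll()
            if info.state == "success":
                return info
            if info.state == "error":
                raise RuntimeError("task failed: %s" % info.error)
            # Mirrors the "Task: {...} progress is N%" DEBUG lines.
            print("task progress is %d%%" % info.progress)
            time.sleep(interval)
        raise TimeoutError("task did not complete within %.0fs" % timeout)

    # Tiny fake task so the sketch runs on its own.
    _steps = iter([TaskInfo("running", 0), TaskInfo("running", 51),
                   TaskInfo("success", 100)])
    print(wait_for_task(lambda: next(_steps), interval=0.01))

In oslo.vmware itself the polling appears to be driven by a fixed-interval looping call rather than an explicit sleep loop, but the observable behaviour is the same sequence of progress lines seen in this log.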
[ 1566.254831] env[63379]: DEBUG nova.compute.manager [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1566.255947] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2639de74-9911-48b8-b076-58c01549f5f5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.476077] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f36571a3-5e6b-4ea1-a780-64cdee713482 tempest-ServersListShow296Test-1579876506 tempest-ServersListShow296Test-1579876506-project-member] Acquiring lock "b4a0e7af-4c54-410e-b372-1ec36cbfb35e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1566.476415] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f36571a3-5e6b-4ea1-a780-64cdee713482 tempest-ServersListShow296Test-1579876506 tempest-ServersListShow296Test-1579876506-project-member] Lock "b4a0e7af-4c54-410e-b372-1ec36cbfb35e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1566.477787] env[63379]: DEBUG oslo_vmware.api [None req-aa947b5a-81ea-4a4c-92cd-ea50bfb162ea tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779433, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.561541] env[63379]: DEBUG oslo_vmware.api [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1779434, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.608724] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48dfaf06-9e4c-41da-bbb2-00a48f3e19b5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.620550] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f770654-e3d0-4c1b-a842-520210344f8f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.669497] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d25661b4-90f5-4e36-b0e5-741d9e772135 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.685518] env[63379]: DEBUG oslo_vmware.api [None req-e53e0306-68e1-499c-8dc1-07b4bb8ee7e7 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c08c41-24af-a0b6-bb43-fa42e1ff8027, 'name': SearchDatastore_Task, 'duration_secs': 0.019288} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1566.686716] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ec0a4d0-7c27-4da4-ba41-8b0a1ec34587 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.693344] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-280c6463-007f-437d-bb36-84c2d18ca798 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.701708] env[63379]: DEBUG oslo_vmware.api [None req-e53e0306-68e1-499c-8dc1-07b4bb8ee7e7 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1566.701708] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]526ec915-9305-28a2-40dc-63f04b50154e" [ 1566.701708] env[63379]: _type = "Task" [ 1566.701708] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1566.714863] env[63379]: DEBUG nova.compute.provider_tree [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1566.732882] env[63379]: DEBUG oslo_vmware.api [None req-e53e0306-68e1-499c-8dc1-07b4bb8ee7e7 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]526ec915-9305-28a2-40dc-63f04b50154e, 'name': SearchDatastore_Task, 'duration_secs': 0.015782} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1566.735638] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e53e0306-68e1-499c-8dc1-07b4bb8ee7e7 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1566.736207] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-e53e0306-68e1-499c-8dc1-07b4bb8ee7e7 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48-rescue.vmdk. 
{{(pid=63379) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1566.737193] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-53abfd60-3e15-4bde-87aa-342ba03d32b1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.750308] env[63379]: DEBUG oslo_vmware.api [None req-e53e0306-68e1-499c-8dc1-07b4bb8ee7e7 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1566.750308] env[63379]: value = "task-1779436" [ 1566.750308] env[63379]: _type = "Task" [ 1566.750308] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1566.765929] env[63379]: DEBUG oslo_vmware.api [None req-e53e0306-68e1-499c-8dc1-07b4bb8ee7e7 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779436, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.783467] env[63379]: INFO nova.compute.manager [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Took 42.84 seconds to build instance. [ 1566.869839] env[63379]: DEBUG nova.compute.manager [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1566.903028] env[63379]: DEBUG nova.virt.hardware [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1566.903265] env[63379]: DEBUG nova.virt.hardware [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1566.908669] env[63379]: DEBUG nova.virt.hardware [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1566.908669] env[63379]: DEBUG nova.virt.hardware [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1566.908669] env[63379]: DEBUG nova.virt.hardware [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1566.908669] env[63379]: DEBUG nova.virt.hardware [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1566.908669] env[63379]: DEBUG nova.virt.hardware [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1566.908669] env[63379]: DEBUG nova.virt.hardware [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1566.908669] env[63379]: DEBUG nova.virt.hardware [None 
req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1566.908669] env[63379]: DEBUG nova.virt.hardware [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1566.908669] env[63379]: DEBUG nova.virt.hardware [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1566.908669] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35fd0630-edab-4dae-a56a-b330e454d38e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.919153] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3311fff1-747f-4d3d-bc46-0c315964e23f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.973485] env[63379]: DEBUG oslo_vmware.api [None req-aa947b5a-81ea-4a4c-92cd-ea50bfb162ea tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779433, 'name': RemoveSnapshot_Task, 'duration_secs': 0.864656} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1566.973973] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-aa947b5a-81ea-4a4c-92cd-ea50bfb162ea tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Deleted Snapshot of the VM instance {{(pid=63379) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1566.974418] env[63379]: INFO nova.compute.manager [None req-aa947b5a-81ea-4a4c-92cd-ea50bfb162ea tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Took 18.61 seconds to snapshot the instance on the hypervisor. [ 1566.982365] env[63379]: DEBUG nova.compute.manager [None req-f36571a3-5e6b-4ea1-a780-64cdee713482 tempest-ServersListShow296Test-1579876506 tempest-ServersListShow296Test-1579876506-project-member] [instance: b4a0e7af-4c54-410e-b372-1ec36cbfb35e] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1567.056052] env[63379]: DEBUG oslo_vmware.api [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1779434, 'name': PowerOnVM_Task, 'duration_secs': 0.654383} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1567.056452] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1567.056630] env[63379]: INFO nova.compute.manager [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Took 8.32 seconds to spawn the instance on the hypervisor. [ 1567.056818] env[63379]: DEBUG nova.compute.manager [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1567.058071] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02abbc96-0222-458f-84ba-98768ae38452 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.223669] env[63379]: DEBUG nova.scheduler.client.report [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1567.268377] env[63379]: DEBUG oslo_vmware.api [None req-e53e0306-68e1-499c-8dc1-07b4bb8ee7e7 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779436, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.289383] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ff475e72-1650-4b09-ad71-f153a6a5dcbf tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Lock "f082cdd7-228e-4100-b301-5af6daea9b36" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.055s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1567.445557] env[63379]: DEBUG nova.compute.manager [req-423947d8-7b9b-4aec-9f2a-5b4ffdfdad2c req-4bfd9cf1-d3f3-4f9e-84d5-8b0280032942 service nova] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Received event network-vif-deleted-f559ba57-d459-458a-89b0-a79226abd033 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1567.445825] env[63379]: INFO nova.compute.manager [req-423947d8-7b9b-4aec-9f2a-5b4ffdfdad2c req-4bfd9cf1-d3f3-4f9e-84d5-8b0280032942 service nova] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Neutron deleted interface f559ba57-d459-458a-89b0-a79226abd033; detaching it from the instance and deleting it from the info cache [ 1567.446025] env[63379]: DEBUG nova.network.neutron [req-423947d8-7b9b-4aec-9f2a-5b4ffdfdad2c req-4bfd9cf1-d3f3-4f9e-84d5-8b0280032942 service nova] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1567.493938] env[63379]: DEBUG nova.compute.manager [None req-f36571a3-5e6b-4ea1-a780-64cdee713482 tempest-ServersListShow296Test-1579876506 tempest-ServersListShow296Test-1579876506-project-member] [instance: b4a0e7af-4c54-410e-b372-1ec36cbfb35e] Instance disappeared before build. {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2440}} [ 1567.586740] env[63379]: INFO nova.compute.manager [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Took 35.37 seconds to build instance. 
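The nova.virt.hardware lines above walk through CPU-topology selection for the 1-vCPU m1.nano flavor: with flavor and image limits of 0:0:0 (effectively unconstrained, defaulting to maxima of 65536 sockets, cores and threads), exactly one topology, 1 socket x 1 core x 1 thread, is possible and is therefore chosen. The sketch below illustrates that enumeration in simplified form; it shows the idea only and is not Nova's _get_possible_cpu_topologies() implementation.

    # Simplified illustration of the CPU-topology enumeration visible in the
    # nova.virt.hardware DEBUG lines above: list every (sockets, cores, threads)
    # combination whose product equals the flavor's vCPU count and which stays
    # within the maximum limits.
    from itertools import product
    from typing import List, Tuple

    def possible_topologies(vcpus: int, max_sockets: int = 65536,
                            max_cores: int = 65536,
                            max_threads: int = 65536) -> List[Tuple[int, int, int]]:
        topologies = []
        for sockets, cores, threads in product(
                range(1, min(max_sockets, vcpus) + 1),
                range(1, min(max_cores, vcpus) + 1),
                range(1, min(max_threads, vcpus) + 1)):
            if sockets * cores * threads == vcpus:
                topologies.append((sockets, cores, threads))
        return topologies

    # For the 1-vCPU m1.nano flavor in the log this yields a single topology,
    # matching "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]".
    print(possible_topologies(1))   # [(1, 1, 1)]
    print(possible_topologies(4))   # e.g. (1, 2, 2), (2, 2, 1), (4, 1, 1), ...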
[ 1567.638940] env[63379]: DEBUG nova.compute.manager [req-d5c882e7-5629-412d-a048-1744e6f2c05c req-0ad49b78-0fb2-4570-9901-068a12b18937 service nova] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Received event network-vif-plugged-3f2cd71e-08fb-4de9-9736-18ae2bbad0eb {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1567.639634] env[63379]: DEBUG oslo_concurrency.lockutils [req-d5c882e7-5629-412d-a048-1744e6f2c05c req-0ad49b78-0fb2-4570-9901-068a12b18937 service nova] Acquiring lock "6e022c9a-642b-4d96-8195-e56809bbd7b9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1567.640280] env[63379]: DEBUG oslo_concurrency.lockutils [req-d5c882e7-5629-412d-a048-1744e6f2c05c req-0ad49b78-0fb2-4570-9901-068a12b18937 service nova] Lock "6e022c9a-642b-4d96-8195-e56809bbd7b9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1567.640280] env[63379]: DEBUG oslo_concurrency.lockutils [req-d5c882e7-5629-412d-a048-1744e6f2c05c req-0ad49b78-0fb2-4570-9901-068a12b18937 service nova] Lock "6e022c9a-642b-4d96-8195-e56809bbd7b9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1567.642900] env[63379]: DEBUG nova.compute.manager [req-d5c882e7-5629-412d-a048-1744e6f2c05c req-0ad49b78-0fb2-4570-9901-068a12b18937 service nova] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] No waiting events found dispatching network-vif-plugged-3f2cd71e-08fb-4de9-9736-18ae2bbad0eb {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1567.642900] env[63379]: WARNING nova.compute.manager [req-d5c882e7-5629-412d-a048-1744e6f2c05c req-0ad49b78-0fb2-4570-9901-068a12b18937 service nova] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Received unexpected event network-vif-plugged-3f2cd71e-08fb-4de9-9736-18ae2bbad0eb for instance with vm_state building and task_state spawning. [ 1567.730567] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.883s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1567.731216] env[63379]: DEBUG nova.compute.manager [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1567.734140] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2778ed40-f1eb-40d9-8eaa-7b5c1add45a1 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.766s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1567.734480] env[63379]: DEBUG nova.objects.instance [None req-2778ed40-f1eb-40d9-8eaa-7b5c1add45a1 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Lazy-loading 'resources' on Instance uuid c439fe86-fc43-4c05-a4b7-3634a043269a {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1567.743109] env[63379]: DEBUG nova.network.neutron [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Successfully updated port: 3f2cd71e-08fb-4de9-9736-18ae2bbad0eb {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1567.764904] env[63379]: DEBUG oslo_vmware.api [None req-e53e0306-68e1-499c-8dc1-07b4bb8ee7e7 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779436, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.621722} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1567.765310] env[63379]: INFO nova.virt.vmwareapi.ds_util [None req-e53e0306-68e1-499c-8dc1-07b4bb8ee7e7 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48-rescue.vmdk. 
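The req-d5c882e7 records show Nova's external-event handshake with Neutron: when a network-vif-plugged notification arrives, pop_instance_event looks for a waiter registered for that instance and event; here none has been registered yet, so the event is only logged as "unexpected". Below is a minimal model of such an event registry built on threading primitives; the class and method names are hypothetical and this is not Nova's InstanceEvents code.

    # Minimal model of the "instance events" pattern in the req-d5c882e7 records:
    # a builder registers the events it expects (e.g. network-vif-plugged-<port>)
    # before triggering the external action, and the handler for incoming Neutron
    # notifications pops the matching waiter. If no waiter exists, the event is
    # merely logged as unexpected. Names here are hypothetical, not Nova's code.
    import threading
    from collections import defaultdict

    class InstanceEventRegistry:
        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = defaultdict(dict)  # instance_uuid -> {event_name: Event}

        def prepare(self, instance_uuid: str, event_name: str) -> threading.Event:
            """Register interest in an event before triggering the external action."""
            ev = threading.Event()
            with self._lock:
                self._waiters[instance_uuid][event_name] = ev
            return ev

        def pop(self, instance_uuid: str, event_name: str) -> bool:
            """Deliver an incoming event; return False if nobody was waiting."""
            with self._lock:
                ev = self._waiters.get(instance_uuid, {}).pop(event_name, None)
            if ev is None:
                print("WARNING: received unexpected event %s for instance %s"
                      % (event_name, instance_uuid))
                return False
            ev.set()
            return True

    registry = InstanceEventRegistry()
    # Event arrives before anyone registered for it -> "unexpected", as in the log.
    registry.pop("6e022c9a-642b-4d96-8195-e56809bbd7b9",
                 "network-vif-plugged-3f2cd71e-08fb-4de9-9736-18ae2bbad0eb")
    # Happy path: register first, then deliver; the waiter is released at once.
    waiter = registry.prepare("some-instance-uuid", "network-vif-plugged-some-port")
    registry.pop("some-instance-uuid", "network-vif-plugged-some-port")
    waiter.wait(timeout=1)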
[ 1567.766256] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93c42c99-8fbe-414d-9254-7bde7122eee9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.799402] env[63379]: DEBUG nova.network.neutron [-] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1567.806025] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-e53e0306-68e1-499c-8dc1-07b4bb8ee7e7 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Reconfiguring VM instance instance-00000032 to attach disk [datastore1] 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48-rescue.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1567.808712] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cea04fcb-8b3e-4474-891f-2db6b0f20195 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.838912] env[63379]: DEBUG oslo_vmware.api [None req-e53e0306-68e1-499c-8dc1-07b4bb8ee7e7 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1567.838912] env[63379]: value = "task-1779438" [ 1567.838912] env[63379]: _type = "Task" [ 1567.838912] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1567.854584] env[63379]: DEBUG oslo_vmware.api [None req-e53e0306-68e1-499c-8dc1-07b4bb8ee7e7 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779438, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.949665] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a9b448e2-cc4b-45e6-973f-f004128b8f05 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.961882] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-342ea0ca-f95e-4da9-951a-1b09b8b05909 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.008033] env[63379]: DEBUG nova.compute.manager [req-423947d8-7b9b-4aec-9f2a-5b4ffdfdad2c req-4bfd9cf1-d3f3-4f9e-84d5-8b0280032942 service nova] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Detach interface failed, port_id=f559ba57-d459-458a-89b0-a79226abd033, reason: Instance d2f5b406-3d0e-4150-aeaf-7cdacbc12c06 could not be found. 
{{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 1568.019404] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f36571a3-5e6b-4ea1-a780-64cdee713482 tempest-ServersListShow296Test-1579876506 tempest-ServersListShow296Test-1579876506-project-member] Lock "b4a0e7af-4c54-410e-b372-1ec36cbfb35e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 1.542s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1568.089213] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5d946265-38e7-405c-a49b-fdd0f1650dac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lock "1d76a28f-822d-4b4f-be2f-2ad3371b3979" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.378s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1568.243800] env[63379]: DEBUG nova.compute.utils [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1568.249452] env[63379]: DEBUG nova.compute.manager [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1568.250061] env[63379]: DEBUG nova.network.neutron [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1568.254861] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Acquiring lock "refresh_cache-6e022c9a-642b-4d96-8195-e56809bbd7b9" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1568.254861] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Acquired lock "refresh_cache-6e022c9a-642b-4d96-8195-e56809bbd7b9" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1568.254861] env[63379]: DEBUG nova.network.neutron [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1568.308336] env[63379]: INFO nova.compute.manager [-] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Took 2.18 seconds to deallocate network for instance. 
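Throughout this section oslo_concurrency.lockutils reports, for every named lock (compute_resources, refresh_cache-<uuid>, the per-instance build lock held for 77.055s above), how long the caller waited to acquire it and how long it was held. The sketch below reproduces that instrumentation pattern on top of threading.Lock purely for illustration; timed_lock() is a hypothetical helper and not oslo.concurrency's API.

    # Sketch of the "waited X s / held Y s" lock instrumentation that
    # oslo_concurrency.lockutils emits around named locks such as
    # "refresh_cache-<uuid>" and "compute_resources". Built on threading.Lock
    # for self-containment; timed_lock() is illustrative only.
    import threading
    import time
    from collections import defaultdict
    from contextlib import contextmanager

    _locks = defaultdict(threading.Lock)

    @contextmanager
    def timed_lock(name: str, owner: str):
        print('Acquiring lock "%s" by "%s"' % (name, owner))
        start = time.monotonic()
        with _locks[name]:
            waited = time.monotonic() - start
            print('Lock "%s" acquired by "%s" :: waited %.3fs' % (name, owner, waited))
            held_start = time.monotonic()
            try:
                yield
            finally:
                held = time.monotonic() - held_start
                print('Lock "%s" "released" by "%s" :: held %.3fs'
                      % (name, owner, held))

    with timed_lock("refresh_cache-6e022c9a-642b-4d96-8195-e56809bbd7b9",
                    "example.refresh_instance_network_cache"):
        time.sleep(0.05)   # stand-in for rebuilding the instance's network info cache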
[ 1568.351043] env[63379]: DEBUG oslo_vmware.api [None req-e53e0306-68e1-499c-8dc1-07b4bb8ee7e7 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779438, 'name': ReconfigVM_Task, 'duration_secs': 0.511095} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1568.356040] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-e53e0306-68e1-499c-8dc1-07b4bb8ee7e7 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Reconfigured VM instance instance-00000032 to attach disk [datastore1] 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48-rescue.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1568.356321] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-782e12f9-0b38-4887-8fe4-0a84690dce14 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.366244] env[63379]: DEBUG nova.policy [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5cbf26808a73470898829b58491e7c6f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'edb0d4b37a67492f9e0275b341e80cc2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1568.395557] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ecaaadf2-8be2-484a-b3c0-4ab4d2330299 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.412620] env[63379]: DEBUG oslo_vmware.api [None req-e53e0306-68e1-499c-8dc1-07b4bb8ee7e7 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1568.412620] env[63379]: value = "task-1779439" [ 1568.412620] env[63379]: _type = "Task" [ 1568.412620] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1568.426043] env[63379]: DEBUG oslo_vmware.api [None req-e53e0306-68e1-499c-8dc1-07b4bb8ee7e7 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779439, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1568.712768] env[63379]: DEBUG nova.network.neutron [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Successfully created port: 3538ffcb-51cd-414b-ad0e-080a6e1ff138 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1568.751496] env[63379]: DEBUG nova.compute.manager [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1568.816399] env[63379]: DEBUG oslo_concurrency.lockutils [None req-50799629-93a0-435f-bfaa-33fa45c0f1d3 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1568.836172] env[63379]: DEBUG nova.network.neutron [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1568.870096] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f28f2c89-be96-4c78-bdf2-eb5128298fd3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.884792] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2ec896a-3ac3-4556-ae60-d4937963ac11 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.928368] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-183b9593-cb83-4b0a-8925-fd21acfd1e4f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.947328] env[63379]: DEBUG oslo_vmware.api [None req-e53e0306-68e1-499c-8dc1-07b4bb8ee7e7 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779439, 'name': ReconfigVM_Task, 'duration_secs': 0.378999} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1568.947753] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e53e0306-68e1-499c-8dc1-07b4bb8ee7e7 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1568.949761] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f03b9692-3020-469e-a22d-0950bdaf97a5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.953921] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-98fb26b3-51de-4e76-9dd0-0ac94a8ced0b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.970165] env[63379]: DEBUG nova.compute.provider_tree [None req-2778ed40-f1eb-40d9-8eaa-7b5c1add45a1 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1568.972306] env[63379]: DEBUG oslo_vmware.api [None req-e53e0306-68e1-499c-8dc1-07b4bb8ee7e7 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1568.972306] env[63379]: value = "task-1779440" [ 1568.972306] env[63379]: _type = "Task" [ 1568.972306] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1568.983032] env[63379]: DEBUG oslo_vmware.api [None req-e53e0306-68e1-499c-8dc1-07b4bb8ee7e7 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779440, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.119567] env[63379]: DEBUG nova.network.neutron [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Updating instance_info_cache with network_info: [{"id": "3f2cd71e-08fb-4de9-9736-18ae2bbad0eb", "address": "fa:16:3e:7a:aa:83", "network": {"id": "8f3138b9-b170-40da-aa17-d0938c48221d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2072680575-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "696eed8e898e4ffd831805df17a93d27", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f2cd71e-08", "ovs_interfaceid": "3f2cd71e-08fb-4de9-9736-18ae2bbad0eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1569.186919] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-d883632d-d439-4c3a-ba74-f30602259671 tempest-VolumesAssistedSnapshotsTest-711968808 tempest-VolumesAssistedSnapshotsTest-711968808-project-admin] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Volume attach. 
Driver type: vmdk {{(pid=63379) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1569.186919] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-d883632d-d439-4c3a-ba74-f30602259671 tempest-VolumesAssistedSnapshotsTest-711968808 tempest-VolumesAssistedSnapshotsTest-711968808-project-admin] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369369', 'volume_id': '8de26acd-1826-4df7-ab2d-e7e3a910af4c', 'name': 'volume-8de26acd-1826-4df7-ab2d-e7e3a910af4c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd47be684-6cd8-45c6-8c6a-9a6db0390f97', 'attached_at': '', 'detached_at': '', 'volume_id': '8de26acd-1826-4df7-ab2d-e7e3a910af4c', 'serial': '8de26acd-1826-4df7-ab2d-e7e3a910af4c'} {{(pid=63379) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1569.187613] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26cdbd8f-09f3-4da6-ab51-72cf89e160f9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.211315] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9da7af4c-e1e2-4dee-a589-c43f20a2afed {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.241351] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-d883632d-d439-4c3a-ba74-f30602259671 tempest-VolumesAssistedSnapshotsTest-711968808 tempest-VolumesAssistedSnapshotsTest-711968808-project-admin] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Reconfiguring VM instance instance-0000000a to attach disk [datastore1] volume-8de26acd-1826-4df7-ab2d-e7e3a910af4c/volume-8de26acd-1826-4df7-ab2d-e7e3a910af4c.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1569.241681] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-53e8e89c-eebb-4562-9b8f-147729401e52 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.266714] env[63379]: DEBUG oslo_vmware.api [None req-d883632d-d439-4c3a-ba74-f30602259671 tempest-VolumesAssistedSnapshotsTest-711968808 tempest-VolumesAssistedSnapshotsTest-711968808-project-admin] Waiting for the task: (returnval){ [ 1569.266714] env[63379]: value = "task-1779441" [ 1569.266714] env[63379]: _type = "Task" [ 1569.266714] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1569.279165] env[63379]: DEBUG oslo_vmware.api [None req-d883632d-d439-4c3a-ba74-f30602259671 tempest-VolumesAssistedSnapshotsTest-711968808 tempest-VolumesAssistedSnapshotsTest-711968808-project-admin] Task: {'id': task-1779441, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.474554] env[63379]: DEBUG nova.scheduler.client.report [None req-2778ed40-f1eb-40d9-8eaa-7b5c1add45a1 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1569.492722] env[63379]: DEBUG oslo_vmware.api [None req-e53e0306-68e1-499c-8dc1-07b4bb8ee7e7 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779440, 'name': PowerOnVM_Task, 'duration_secs': 0.425848} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1569.492996] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e53e0306-68e1-499c-8dc1-07b4bb8ee7e7 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1569.497188] env[63379]: DEBUG nova.compute.manager [None req-e53e0306-68e1-499c-8dc1-07b4bb8ee7e7 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1569.498512] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed4dc434-e005-4c06-8760-96a96e8f7e13 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.564987] env[63379]: DEBUG nova.compute.manager [req-f559417b-0e1c-481d-b1a4-4ea80cefa59d req-edbe31cc-7755-4d81-9edd-cddbc45c0812 service nova] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Received event network-changed-2ac41cb5-759a-42a6-a664-26ad0cc81d81 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1569.565237] env[63379]: DEBUG nova.compute.manager [req-f559417b-0e1c-481d-b1a4-4ea80cefa59d req-edbe31cc-7755-4d81-9edd-cddbc45c0812 service nova] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Refreshing instance network info cache due to event network-changed-2ac41cb5-759a-42a6-a664-26ad0cc81d81. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1569.565606] env[63379]: DEBUG oslo_concurrency.lockutils [req-f559417b-0e1c-481d-b1a4-4ea80cefa59d req-edbe31cc-7755-4d81-9edd-cddbc45c0812 service nova] Acquiring lock "refresh_cache-1d76a28f-822d-4b4f-be2f-2ad3371b3979" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1569.565776] env[63379]: DEBUG oslo_concurrency.lockutils [req-f559417b-0e1c-481d-b1a4-4ea80cefa59d req-edbe31cc-7755-4d81-9edd-cddbc45c0812 service nova] Acquired lock "refresh_cache-1d76a28f-822d-4b4f-be2f-2ad3371b3979" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1569.565947] env[63379]: DEBUG nova.network.neutron [req-f559417b-0e1c-481d-b1a4-4ea80cefa59d req-edbe31cc-7755-4d81-9edd-cddbc45c0812 service nova] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Refreshing network info cache for port 2ac41cb5-759a-42a6-a664-26ad0cc81d81 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1569.623715] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Releasing lock "refresh_cache-6e022c9a-642b-4d96-8195-e56809bbd7b9" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1569.623840] env[63379]: DEBUG nova.compute.manager [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Instance network_info: |[{"id": "3f2cd71e-08fb-4de9-9736-18ae2bbad0eb", "address": "fa:16:3e:7a:aa:83", "network": {"id": "8f3138b9-b170-40da-aa17-d0938c48221d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2072680575-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "696eed8e898e4ffd831805df17a93d27", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f2cd71e-08", "ovs_interfaceid": "3f2cd71e-08fb-4de9-9736-18ae2bbad0eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1569.624715] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7a:aa:83', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f880ac2e-d532-4f54-87bb-998a8d1bca78', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'3f2cd71e-08fb-4de9-9736-18ae2bbad0eb', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1569.632347] env[63379]: DEBUG oslo.service.loopingcall [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1569.632493] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1569.632739] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c6cb740f-87cd-425a-90c7-d53caf9dc0d9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.657158] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1569.657158] env[63379]: value = "task-1779442" [ 1569.657158] env[63379]: _type = "Task" [ 1569.657158] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1569.669735] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779442, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.701438] env[63379]: DEBUG nova.compute.manager [req-5591f16e-469e-46b6-8a4d-8d71198599c4 req-7899373b-da3d-4c89-9b4e-3c4508c1fb44 service nova] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Received event network-changed-3f2cd71e-08fb-4de9-9736-18ae2bbad0eb {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1569.701710] env[63379]: DEBUG nova.compute.manager [req-5591f16e-469e-46b6-8a4d-8d71198599c4 req-7899373b-da3d-4c89-9b4e-3c4508c1fb44 service nova] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Refreshing instance network info cache due to event network-changed-3f2cd71e-08fb-4de9-9736-18ae2bbad0eb. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1569.701986] env[63379]: DEBUG oslo_concurrency.lockutils [req-5591f16e-469e-46b6-8a4d-8d71198599c4 req-7899373b-da3d-4c89-9b4e-3c4508c1fb44 service nova] Acquiring lock "refresh_cache-6e022c9a-642b-4d96-8195-e56809bbd7b9" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1569.702315] env[63379]: DEBUG oslo_concurrency.lockutils [req-5591f16e-469e-46b6-8a4d-8d71198599c4 req-7899373b-da3d-4c89-9b4e-3c4508c1fb44 service nova] Acquired lock "refresh_cache-6e022c9a-642b-4d96-8195-e56809bbd7b9" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1569.702561] env[63379]: DEBUG nova.network.neutron [req-5591f16e-469e-46b6-8a4d-8d71198599c4 req-7899373b-da3d-4c89-9b4e-3c4508c1fb44 service nova] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Refreshing network info cache for port 3f2cd71e-08fb-4de9-9736-18ae2bbad0eb {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1569.770231] env[63379]: DEBUG nova.compute.manager [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1569.784301] env[63379]: DEBUG oslo_vmware.api [None req-d883632d-d439-4c3a-ba74-f30602259671 tempest-VolumesAssistedSnapshotsTest-711968808 tempest-VolumesAssistedSnapshotsTest-711968808-project-admin] Task: {'id': task-1779441, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.804371] env[63379]: DEBUG nova.virt.hardware [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1569.804668] env[63379]: DEBUG nova.virt.hardware [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1569.804834] env[63379]: DEBUG nova.virt.hardware [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 1569.805029] env[63379]: DEBUG nova.virt.hardware [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1569.805203] env[63379]: DEBUG nova.virt.hardware [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1569.805358] env[63379]: DEBUG nova.virt.hardware [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1569.805589] env[63379]: DEBUG nova.virt.hardware [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1569.805755] env[63379]: DEBUG nova.virt.hardware [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1569.805928] env[63379]: DEBUG nova.virt.hardware [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1569.806108] env[63379]: DEBUG nova.virt.hardware [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1569.806288] env[63379]: DEBUG nova.virt.hardware [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1569.807225] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23fc9dd5-e580-4b73-aa7d-cce0829dc5a8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.816449] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd1a6848-8101-4d35-b40a-fcbe135f9893 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.984729] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2778ed40-f1eb-40d9-8eaa-7b5c1add45a1 tempest-AttachInterfacesV270Test-2094717890 
tempest-AttachInterfacesV270Test-2094717890-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.250s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1569.987754] env[63379]: DEBUG oslo_concurrency.lockutils [None req-627eb219-2e6b-4bb3-9e67-d7025775807b tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.666s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1569.988016] env[63379]: DEBUG nova.objects.instance [None req-627eb219-2e6b-4bb3-9e67-d7025775807b tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Lazy-loading 'resources' on Instance uuid 318355e9-b4cc-4645-ac51-b583d14e1134 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1570.020118] env[63379]: INFO nova.scheduler.client.report [None req-2778ed40-f1eb-40d9-8eaa-7b5c1add45a1 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Deleted allocations for instance c439fe86-fc43-4c05-a4b7-3634a043269a [ 1570.170024] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779442, 'name': CreateVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.285009] env[63379]: DEBUG oslo_vmware.api [None req-d883632d-d439-4c3a-ba74-f30602259671 tempest-VolumesAssistedSnapshotsTest-711968808 tempest-VolumesAssistedSnapshotsTest-711968808-project-admin] Task: {'id': task-1779441, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.343417] env[63379]: DEBUG nova.compute.manager [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Stashing vm_state: active {{(pid=63379) _prep_resize /opt/stack/nova/nova/compute/manager.py:5671}} [ 1570.508449] env[63379]: DEBUG nova.network.neutron [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Successfully updated port: 3538ffcb-51cd-414b-ad0e-080a6e1ff138 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1570.530866] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2778ed40-f1eb-40d9-8eaa-7b5c1add45a1 tempest-AttachInterfacesV270Test-2094717890 tempest-AttachInterfacesV270Test-2094717890-project-member] Lock "c439fe86-fc43-4c05-a4b7-3634a043269a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.559s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1570.576970] env[63379]: DEBUG nova.network.neutron [req-f559417b-0e1c-481d-b1a4-4ea80cefa59d req-edbe31cc-7755-4d81-9edd-cddbc45c0812 service nova] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Updated VIF entry in instance network info cache for port 2ac41cb5-759a-42a6-a664-26ad0cc81d81. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1570.577580] env[63379]: DEBUG nova.network.neutron [req-f559417b-0e1c-481d-b1a4-4ea80cefa59d req-edbe31cc-7755-4d81-9edd-cddbc45c0812 service nova] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Updating instance_info_cache with network_info: [{"id": "2ac41cb5-759a-42a6-a664-26ad0cc81d81", "address": "fa:16:3e:00:25:b1", "network": {"id": "a7b09ae6-790d-492f-a067-68a9ea22533a", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-776111847-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fceda42cf54845eab8068573e0f8eb26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ac41cb5-75", "ovs_interfaceid": "2ac41cb5-759a-42a6-a664-26ad0cc81d81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1570.579035] env[63379]: INFO nova.compute.manager [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Rebuilding instance [ 1570.625219] env[63379]: DEBUG nova.compute.manager [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1570.626122] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef166cd8-637a-464a-b1e8-20694e174a48 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.661230] env[63379]: DEBUG nova.network.neutron [req-5591f16e-469e-46b6-8a4d-8d71198599c4 req-7899373b-da3d-4c89-9b4e-3c4508c1fb44 service nova] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Updated VIF entry in instance network info cache for port 3f2cd71e-08fb-4de9-9736-18ae2bbad0eb. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1570.661621] env[63379]: DEBUG nova.network.neutron [req-5591f16e-469e-46b6-8a4d-8d71198599c4 req-7899373b-da3d-4c89-9b4e-3c4508c1fb44 service nova] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Updating instance_info_cache with network_info: [{"id": "3f2cd71e-08fb-4de9-9736-18ae2bbad0eb", "address": "fa:16:3e:7a:aa:83", "network": {"id": "8f3138b9-b170-40da-aa17-d0938c48221d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2072680575-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "696eed8e898e4ffd831805df17a93d27", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f2cd71e-08", "ovs_interfaceid": "3f2cd71e-08fb-4de9-9736-18ae2bbad0eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1570.674713] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779442, 'name': CreateVM_Task, 'duration_secs': 0.585652} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1570.675646] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1570.675646] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1570.675773] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1570.676435] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1570.677653] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0dd69dc2-da95-48fe-937c-f3b3711abe52 {{(pid=63379) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.686613] env[63379]: DEBUG oslo_vmware.api [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Waiting for the task: (returnval){ [ 1570.686613] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]524b084d-0ffb-eaf8-2d24-fec9e81eac79" [ 1570.686613] env[63379]: _type = "Task" [ 1570.686613] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1570.695032] env[63379]: DEBUG oslo_vmware.api [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]524b084d-0ffb-eaf8-2d24-fec9e81eac79, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.784611] env[63379]: DEBUG oslo_vmware.api [None req-d883632d-d439-4c3a-ba74-f30602259671 tempest-VolumesAssistedSnapshotsTest-711968808 tempest-VolumesAssistedSnapshotsTest-711968808-project-admin] Task: {'id': task-1779441, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.874213] env[63379]: DEBUG oslo_concurrency.lockutils [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1571.010819] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "refresh_cache-f983d089-7cfc-46a5-8f8d-f49f67aef1da" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1571.011032] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquired lock "refresh_cache-f983d089-7cfc-46a5-8f8d-f49f67aef1da" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1571.011222] env[63379]: DEBUG nova.network.neutron [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1571.031141] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b17fe0fc-03b3-41b4-9c54-344b77e041af {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.041805] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a575427-11ab-491b-bae5-b323e7b97b03 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.079816] env[63379]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bd12135-d96c-49e5-b2e7-9d37a9f789ec {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.084442] env[63379]: DEBUG oslo_concurrency.lockutils [req-f559417b-0e1c-481d-b1a4-4ea80cefa59d req-edbe31cc-7755-4d81-9edd-cddbc45c0812 service nova] Releasing lock "refresh_cache-1d76a28f-822d-4b4f-be2f-2ad3371b3979" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1571.088783] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f249b4d0-0046-48ee-97ed-298270f7c1f3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.105455] env[63379]: DEBUG nova.compute.provider_tree [None req-627eb219-2e6b-4bb3-9e67-d7025775807b tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1571.141669] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1571.142398] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-074e2a9f-2e76-42dd-b86f-a1fc39afda70 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.151061] env[63379]: DEBUG oslo_vmware.api [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Waiting for the task: (returnval){ [ 1571.151061] env[63379]: value = "task-1779443" [ 1571.151061] env[63379]: _type = "Task" [ 1571.151061] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.160655] env[63379]: DEBUG oslo_vmware.api [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Task: {'id': task-1779443, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.169383] env[63379]: DEBUG oslo_concurrency.lockutils [req-5591f16e-469e-46b6-8a4d-8d71198599c4 req-7899373b-da3d-4c89-9b4e-3c4508c1fb44 service nova] Releasing lock "refresh_cache-6e022c9a-642b-4d96-8195-e56809bbd7b9" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1571.198364] env[63379]: DEBUG oslo_vmware.api [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]524b084d-0ffb-eaf8-2d24-fec9e81eac79, 'name': SearchDatastore_Task, 'duration_secs': 0.040904} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1571.198700] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1571.199441] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1571.199441] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1571.199441] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1571.199741] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1571.199877] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6e2765ad-3a91-470e-9119-4d6719b184e5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.220977] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1571.221288] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1571.222180] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3e6b905-e028-4cd9-b50e-ae8972f85291 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.229697] env[63379]: DEBUG oslo_vmware.api [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Waiting for the task: (returnval){ [ 1571.229697] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52ed2433-d6ff-a7d6-28bc-ea30078b0fab" [ 1571.229697] env[63379]: _type = "Task" [ 1571.229697] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.240667] env[63379]: DEBUG oslo_vmware.api [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52ed2433-d6ff-a7d6-28bc-ea30078b0fab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.287818] env[63379]: DEBUG oslo_vmware.api [None req-d883632d-d439-4c3a-ba74-f30602259671 tempest-VolumesAssistedSnapshotsTest-711968808 tempest-VolumesAssistedSnapshotsTest-711968808-project-admin] Task: {'id': task-1779441, 'name': ReconfigVM_Task, 'duration_secs': 1.546965} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1571.287818] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-d883632d-d439-4c3a-ba74-f30602259671 tempest-VolumesAssistedSnapshotsTest-711968808 tempest-VolumesAssistedSnapshotsTest-711968808-project-admin] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Reconfigured VM instance instance-0000000a to attach disk [datastore1] volume-8de26acd-1826-4df7-ab2d-e7e3a910af4c/volume-8de26acd-1826-4df7-ab2d-e7e3a910af4c.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1571.290349] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0e2786ed-e6bc-4d1c-a566-5195bb8dfd9b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.309053] env[63379]: DEBUG oslo_vmware.api [None req-d883632d-d439-4c3a-ba74-f30602259671 tempest-VolumesAssistedSnapshotsTest-711968808 tempest-VolumesAssistedSnapshotsTest-711968808-project-admin] Waiting for the task: (returnval){ [ 1571.309053] env[63379]: value = "task-1779444" [ 1571.309053] env[63379]: _type = "Task" [ 1571.309053] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.320638] env[63379]: DEBUG oslo_vmware.api [None req-d883632d-d439-4c3a-ba74-f30602259671 tempest-VolumesAssistedSnapshotsTest-711968808 tempest-VolumesAssistedSnapshotsTest-711968808-project-admin] Task: {'id': task-1779444, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.552287] env[63379]: DEBUG nova.network.neutron [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1571.611353] env[63379]: DEBUG nova.scheduler.client.report [None req-627eb219-2e6b-4bb3-9e67-d7025775807b tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1571.615437] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e2a8e779-188f-490a-b98a-05a17f960434 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Acquiring lock "aa44a4ff-14e5-42d2-a082-06fe0ae9646c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1571.616504] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e2a8e779-188f-490a-b98a-05a17f960434 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Lock "aa44a4ff-14e5-42d2-a082-06fe0ae9646c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1571.616504] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e2a8e779-188f-490a-b98a-05a17f960434 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Acquiring lock "aa44a4ff-14e5-42d2-a082-06fe0ae9646c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1571.616504] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e2a8e779-188f-490a-b98a-05a17f960434 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Lock "aa44a4ff-14e5-42d2-a082-06fe0ae9646c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1571.616680] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e2a8e779-188f-490a-b98a-05a17f960434 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Lock "aa44a4ff-14e5-42d2-a082-06fe0ae9646c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1571.618893] env[63379]: INFO nova.compute.manager [None req-e2a8e779-188f-490a-b98a-05a17f960434 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Terminating instance [ 1571.620772] env[63379]: DEBUG nova.compute.manager [None req-e2a8e779-188f-490a-b98a-05a17f960434 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1571.620965] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e2a8e779-188f-490a-b98a-05a17f960434 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1571.621908] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86fca71c-0c84-4cda-b056-e2915dce1055 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.632415] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2a8e779-188f-490a-b98a-05a17f960434 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1571.632733] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-294bf53d-e136-4bf7-b32b-e4557d7c8fe5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.641676] env[63379]: DEBUG oslo_vmware.api [None req-e2a8e779-188f-490a-b98a-05a17f960434 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Waiting for the task: (returnval){ [ 1571.641676] env[63379]: value = "task-1779445" [ 1571.641676] env[63379]: _type = "Task" [ 1571.641676] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.653310] env[63379]: DEBUG oslo_vmware.api [None req-e2a8e779-188f-490a-b98a-05a17f960434 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779445, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.664397] env[63379]: DEBUG oslo_vmware.api [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Task: {'id': task-1779443, 'name': PowerOffVM_Task, 'duration_secs': 0.288775} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1571.664796] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1571.665634] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1571.665956] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-52130869-a183-4ece-b9b0-0a0a93ca0921 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.675893] env[63379]: DEBUG oslo_vmware.api [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Waiting for the task: (returnval){ [ 1571.675893] env[63379]: value = "task-1779446" [ 1571.675893] env[63379]: _type = "Task" [ 1571.675893] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.686637] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] VM already powered off {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1571.686924] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Volume detach. 
Driver type: vmdk {{(pid=63379) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1571.687160] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369286', 'volume_id': '084d5362-d8e9-4034-9623-555ed06a1add', 'name': 'volume-084d5362-d8e9-4034-9623-555ed06a1add', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6', 'attached_at': '', 'detached_at': '', 'volume_id': '084d5362-d8e9-4034-9623-555ed06a1add', 'serial': '084d5362-d8e9-4034-9623-555ed06a1add'} {{(pid=63379) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1571.688083] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c13fd2c-201c-47dd-8a91-1c4fbd7a860f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.712074] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-676a3390-8d2f-434c-83ef-d6446ce262a3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.721494] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95ebfda2-f7b8-4087-8388-2d868e493869 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.744094] env[63379]: DEBUG nova.network.neutron [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Updating instance_info_cache with network_info: [{"id": "3538ffcb-51cd-414b-ad0e-080a6e1ff138", "address": "fa:16:3e:6a:5e:54", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3538ffcb-51", "ovs_interfaceid": "3538ffcb-51cd-414b-ad0e-080a6e1ff138", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1571.750319] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51d8b81a-572c-4251-ba53-7b37d1a8a680 {{(pid=63379) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.754454] env[63379]: DEBUG nova.compute.manager [req-3e73b2e0-8c2b-484a-a99a-17c609dcf15b req-b6030f48-348d-4f35-8c09-be7b65d4fb45 service nova] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Received event network-vif-plugged-3538ffcb-51cd-414b-ad0e-080a6e1ff138 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1571.754685] env[63379]: DEBUG oslo_concurrency.lockutils [req-3e73b2e0-8c2b-484a-a99a-17c609dcf15b req-b6030f48-348d-4f35-8c09-be7b65d4fb45 service nova] Acquiring lock "f983d089-7cfc-46a5-8f8d-f49f67aef1da-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1571.754910] env[63379]: DEBUG oslo_concurrency.lockutils [req-3e73b2e0-8c2b-484a-a99a-17c609dcf15b req-b6030f48-348d-4f35-8c09-be7b65d4fb45 service nova] Lock "f983d089-7cfc-46a5-8f8d-f49f67aef1da-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1571.755105] env[63379]: DEBUG oslo_concurrency.lockutils [req-3e73b2e0-8c2b-484a-a99a-17c609dcf15b req-b6030f48-348d-4f35-8c09-be7b65d4fb45 service nova] Lock "f983d089-7cfc-46a5-8f8d-f49f67aef1da-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1571.755305] env[63379]: DEBUG nova.compute.manager [req-3e73b2e0-8c2b-484a-a99a-17c609dcf15b req-b6030f48-348d-4f35-8c09-be7b65d4fb45 service nova] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] No waiting events found dispatching network-vif-plugged-3538ffcb-51cd-414b-ad0e-080a6e1ff138 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1571.755525] env[63379]: WARNING nova.compute.manager [req-3e73b2e0-8c2b-484a-a99a-17c609dcf15b req-b6030f48-348d-4f35-8c09-be7b65d4fb45 service nova] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Received unexpected event network-vif-plugged-3538ffcb-51cd-414b-ad0e-080a6e1ff138 for instance with vm_state building and task_state spawning. [ 1571.755751] env[63379]: DEBUG nova.compute.manager [req-3e73b2e0-8c2b-484a-a99a-17c609dcf15b req-b6030f48-348d-4f35-8c09-be7b65d4fb45 service nova] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Received event network-changed-3538ffcb-51cd-414b-ad0e-080a6e1ff138 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1571.755927] env[63379]: DEBUG nova.compute.manager [req-3e73b2e0-8c2b-484a-a99a-17c609dcf15b req-b6030f48-348d-4f35-8c09-be7b65d4fb45 service nova] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Refreshing instance network info cache due to event network-changed-3538ffcb-51cd-414b-ad0e-080a6e1ff138. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1571.756119] env[63379]: DEBUG oslo_concurrency.lockutils [req-3e73b2e0-8c2b-484a-a99a-17c609dcf15b req-b6030f48-348d-4f35-8c09-be7b65d4fb45 service nova] Acquiring lock "refresh_cache-f983d089-7cfc-46a5-8f8d-f49f67aef1da" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1571.771828] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] The volume has not been displaced from its original location: [datastore1] volume-084d5362-d8e9-4034-9623-555ed06a1add/volume-084d5362-d8e9-4034-9623-555ed06a1add.vmdk. No consolidation needed. {{(pid=63379) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1571.777547] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Reconfiguring VM instance instance-00000024 to detach disk 2000 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1571.781738] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5b7a5930-f2d2-41ee-9c3f-0906885fbe6a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.796309] env[63379]: DEBUG oslo_vmware.api [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52ed2433-d6ff-a7d6-28bc-ea30078b0fab, 'name': SearchDatastore_Task, 'duration_secs': 0.014872} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1571.798111] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ab45b51-a1be-4288-912d-63e556b7f605 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.805374] env[63379]: DEBUG oslo_vmware.api [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Waiting for the task: (returnval){ [ 1571.805374] env[63379]: value = "task-1779447" [ 1571.805374] env[63379]: _type = "Task" [ 1571.805374] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.807550] env[63379]: DEBUG oslo_vmware.api [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Waiting for the task: (returnval){ [ 1571.807550] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]520d1c71-73c6-25ca-595c-82d4b481558e" [ 1571.807550] env[63379]: _type = "Task" [ 1571.807550] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.825802] env[63379]: DEBUG oslo_vmware.api [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]520d1c71-73c6-25ca-595c-82d4b481558e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.832852] env[63379]: DEBUG oslo_vmware.api [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Task: {'id': task-1779447, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.833294] env[63379]: DEBUG oslo_vmware.api [None req-d883632d-d439-4c3a-ba74-f30602259671 tempest-VolumesAssistedSnapshotsTest-711968808 tempest-VolumesAssistedSnapshotsTest-711968808-project-admin] Task: {'id': task-1779444, 'name': ReconfigVM_Task, 'duration_secs': 0.21197} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1571.833588] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-d883632d-d439-4c3a-ba74-f30602259671 tempest-VolumesAssistedSnapshotsTest-711968808 tempest-VolumesAssistedSnapshotsTest-711968808-project-admin] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369369', 'volume_id': '8de26acd-1826-4df7-ab2d-e7e3a910af4c', 'name': 'volume-8de26acd-1826-4df7-ab2d-e7e3a910af4c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd47be684-6cd8-45c6-8c6a-9a6db0390f97', 'attached_at': '', 'detached_at': '', 'volume_id': '8de26acd-1826-4df7-ab2d-e7e3a910af4c', 'serial': '8de26acd-1826-4df7-ab2d-e7e3a910af4c'} {{(pid=63379) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1572.120404] env[63379]: DEBUG oslo_concurrency.lockutils [None req-627eb219-2e6b-4bb3-9e67-d7025775807b tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.133s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1572.125168] env[63379]: DEBUG oslo_concurrency.lockutils [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.350s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1572.127027] env[63379]: INFO nova.compute.claims [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1572.152131] env[63379]: INFO nova.scheduler.client.report [None req-627eb219-2e6b-4bb3-9e67-d7025775807b 
tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Deleted allocations for instance 318355e9-b4cc-4645-ac51-b583d14e1134 [ 1572.162215] env[63379]: DEBUG oslo_vmware.api [None req-e2a8e779-188f-490a-b98a-05a17f960434 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779445, 'name': PowerOffVM_Task, 'duration_secs': 0.294329} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1572.162215] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2a8e779-188f-490a-b98a-05a17f960434 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1572.162215] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e2a8e779-188f-490a-b98a-05a17f960434 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1572.162744] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4fcd6207-5409-4b7f-8bf3-7eedae704ef3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.257363] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Releasing lock "refresh_cache-f983d089-7cfc-46a5-8f8d-f49f67aef1da" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1572.257783] env[63379]: DEBUG nova.compute.manager [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Instance network_info: |[{"id": "3538ffcb-51cd-414b-ad0e-080a6e1ff138", "address": "fa:16:3e:6a:5e:54", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3538ffcb-51", "ovs_interfaceid": "3538ffcb-51cd-414b-ad0e-080a6e1ff138", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1572.258121] env[63379]: DEBUG 
oslo_concurrency.lockutils [req-3e73b2e0-8c2b-484a-a99a-17c609dcf15b req-b6030f48-348d-4f35-8c09-be7b65d4fb45 service nova] Acquired lock "refresh_cache-f983d089-7cfc-46a5-8f8d-f49f67aef1da" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1572.258381] env[63379]: DEBUG nova.network.neutron [req-3e73b2e0-8c2b-484a-a99a-17c609dcf15b req-b6030f48-348d-4f35-8c09-be7b65d4fb45 service nova] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Refreshing network info cache for port 3538ffcb-51cd-414b-ad0e-080a6e1ff138 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1572.259600] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6a:5e:54', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c7d2575f-b92f-44ec-a863-634cb76631a2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3538ffcb-51cd-414b-ad0e-080a6e1ff138', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1572.274705] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Creating folder: Project (edb0d4b37a67492f9e0275b341e80cc2). Parent ref: group-v369214. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1572.275622] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ca6e5137-f669-4048-ba4b-ad0cdc25a928 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.289719] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Created folder: Project (edb0d4b37a67492f9e0275b341e80cc2) in parent group-v369214. [ 1572.289719] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Creating folder: Instances. Parent ref: group-v369371. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1572.289900] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-75326150-bc91-43f3-bccd-9642d0958167 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.302988] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Created folder: Instances in parent group-v369371. [ 1572.303334] env[63379]: DEBUG oslo.service.loopingcall [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1572.303611] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1572.303782] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-338aeb17-e474-4402-b0fe-9b71cc60358a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.339927] env[63379]: DEBUG oslo_vmware.api [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Task: {'id': task-1779447, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.347761] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1572.347761] env[63379]: value = "task-1779451" [ 1572.347761] env[63379]: _type = "Task" [ 1572.347761] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1572.347980] env[63379]: DEBUG oslo_vmware.api [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]520d1c71-73c6-25ca-595c-82d4b481558e, 'name': SearchDatastore_Task, 'duration_secs': 0.020883} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1572.349742] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1572.350193] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 6e022c9a-642b-4d96-8195-e56809bbd7b9/6e022c9a-642b-4d96-8195-e56809bbd7b9.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1572.350634] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e2a8e779-188f-490a-b98a-05a17f960434 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1572.350944] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e2a8e779-188f-490a-b98a-05a17f960434 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1572.351444] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-e2a8e779-188f-490a-b98a-05a17f960434 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Deleting the datastore file [datastore1] aa44a4ff-14e5-42d2-a082-06fe0ae9646c {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1572.356467] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b04dbb0a-c76e-4561-89db-52c5f62cc3a9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.359947] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-adad6f1b-d848-4390-94ca-d190796c670f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.377648] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779451, 'name': CreateVM_Task} progress is 10%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.383948] env[63379]: DEBUG oslo_vmware.api [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Waiting for the task: (returnval){ [ 1572.383948] env[63379]: value = "task-1779452" [ 1572.383948] env[63379]: _type = "Task" [ 1572.383948] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1572.385033] env[63379]: DEBUG oslo_vmware.api [None req-e2a8e779-188f-490a-b98a-05a17f960434 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Waiting for the task: (returnval){ [ 1572.385033] env[63379]: value = "task-1779453" [ 1572.385033] env[63379]: _type = "Task" [ 1572.385033] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1572.404240] env[63379]: DEBUG oslo_vmware.api [None req-e2a8e779-188f-490a-b98a-05a17f960434 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779453, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.411047] env[63379]: DEBUG oslo_vmware.api [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': task-1779452, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.665862] env[63379]: DEBUG oslo_concurrency.lockutils [None req-627eb219-2e6b-4bb3-9e67-d7025775807b tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Lock "318355e9-b4cc-4645-ac51-b583d14e1134" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.879s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1572.666897] env[63379]: DEBUG oslo_concurrency.lockutils [None req-53ff7a10-5495-4cde-8110-cc0faccef8c7 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Lock "318355e9-b4cc-4645-ac51-b583d14e1134" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 24.426s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1572.667137] env[63379]: DEBUG oslo_concurrency.lockutils [None req-53ff7a10-5495-4cde-8110-cc0faccef8c7 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Acquiring lock "318355e9-b4cc-4645-ac51-b583d14e1134-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1572.667343] env[63379]: DEBUG oslo_concurrency.lockutils [None req-53ff7a10-5495-4cde-8110-cc0faccef8c7 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Lock "318355e9-b4cc-4645-ac51-b583d14e1134-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1572.667510] env[63379]: DEBUG oslo_concurrency.lockutils [None req-53ff7a10-5495-4cde-8110-cc0faccef8c7 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Lock "318355e9-b4cc-4645-ac51-b583d14e1134-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1572.670056] env[63379]: INFO nova.compute.manager [None req-53ff7a10-5495-4cde-8110-cc0faccef8c7 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Terminating instance [ 1572.671781] env[63379]: DEBUG oslo_concurrency.lockutils [None req-53ff7a10-5495-4cde-8110-cc0faccef8c7 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Acquiring lock "refresh_cache-318355e9-b4cc-4645-ac51-b583d14e1134" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1572.671988] env[63379]: DEBUG oslo_concurrency.lockutils [None req-53ff7a10-5495-4cde-8110-cc0faccef8c7 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Acquired lock "refresh_cache-318355e9-b4cc-4645-ac51-b583d14e1134" {{(pid=63379) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1572.672375] env[63379]: DEBUG nova.network.neutron [None req-53ff7a10-5495-4cde-8110-cc0faccef8c7 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1572.713953] env[63379]: INFO nova.compute.manager [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Rescuing [ 1572.714232] env[63379]: DEBUG oslo_concurrency.lockutils [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquiring lock "refresh_cache-1d2de9da-9dfe-42d2-b206-bb5139b1970b" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1572.714437] env[63379]: DEBUG oslo_concurrency.lockutils [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquired lock "refresh_cache-1d2de9da-9dfe-42d2-b206-bb5139b1970b" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1572.714621] env[63379]: DEBUG nova.network.neutron [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1572.837686] env[63379]: DEBUG oslo_vmware.api [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Task: {'id': task-1779447, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.867216] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779451, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.892221] env[63379]: DEBUG nova.objects.instance [None req-d883632d-d439-4c3a-ba74-f30602259671 tempest-VolumesAssistedSnapshotsTest-711968808 tempest-VolumesAssistedSnapshotsTest-711968808-project-admin] Lazy-loading 'flavor' on Instance uuid d47be684-6cd8-45c6-8c6a-9a6db0390f97 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1572.917921] env[63379]: DEBUG oslo_vmware.api [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': task-1779452, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.918248] env[63379]: DEBUG oslo_vmware.api [None req-e2a8e779-188f-490a-b98a-05a17f960434 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779453, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.350031} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1572.918499] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2a8e779-188f-490a-b98a-05a17f960434 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1572.918842] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e2a8e779-188f-490a-b98a-05a17f960434 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1572.918933] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e2a8e779-188f-490a-b98a-05a17f960434 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1572.919127] env[63379]: INFO nova.compute.manager [None req-e2a8e779-188f-490a-b98a-05a17f960434 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Took 1.30 seconds to destroy the instance on the hypervisor. [ 1572.919396] env[63379]: DEBUG oslo.service.loopingcall [None req-e2a8e779-188f-490a-b98a-05a17f960434 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1572.919602] env[63379]: DEBUG nova.compute.manager [-] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1572.919702] env[63379]: DEBUG nova.network.neutron [-] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1573.175242] env[63379]: DEBUG nova.compute.utils [None req-53ff7a10-5495-4cde-8110-cc0faccef8c7 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Can not refresh info_cache because instance was not found {{(pid=63379) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1024}} [ 1573.197251] env[63379]: DEBUG nova.network.neutron [None req-53ff7a10-5495-4cde-8110-cc0faccef8c7 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1573.285212] env[63379]: DEBUG nova.network.neutron [req-3e73b2e0-8c2b-484a-a99a-17c609dcf15b req-b6030f48-348d-4f35-8c09-be7b65d4fb45 service nova] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Updated VIF entry in instance network info cache for port 3538ffcb-51cd-414b-ad0e-080a6e1ff138. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1573.285664] env[63379]: DEBUG nova.network.neutron [req-3e73b2e0-8c2b-484a-a99a-17c609dcf15b req-b6030f48-348d-4f35-8c09-be7b65d4fb45 service nova] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Updating instance_info_cache with network_info: [{"id": "3538ffcb-51cd-414b-ad0e-080a6e1ff138", "address": "fa:16:3e:6a:5e:54", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3538ffcb-51", "ovs_interfaceid": "3538ffcb-51cd-414b-ad0e-080a6e1ff138", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1573.312672] env[63379]: DEBUG nova.network.neutron [None req-53ff7a10-5495-4cde-8110-cc0faccef8c7 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1573.334532] env[63379]: DEBUG oslo_vmware.api [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Task: {'id': task-1779447, 'name': ReconfigVM_Task, 'duration_secs': 1.391831} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1573.337445] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Reconfigured VM instance instance-00000024 to detach disk 2000 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1573.342666] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e8de3bb4-ec24-43a9-83a0-c4d1a2bf5174 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.366074] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779451, 'name': CreateVM_Task, 'duration_secs': 0.81099} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1573.367618] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1573.367891] env[63379]: DEBUG oslo_vmware.api [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Waiting for the task: (returnval){ [ 1573.367891] env[63379]: value = "task-1779454" [ 1573.367891] env[63379]: _type = "Task" [ 1573.367891] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1573.369196] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1573.369196] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1573.369196] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1573.369422] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26c34737-e2b4-4d5d-a4ba-64550487d0dd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.380591] env[63379]: DEBUG oslo_vmware.api [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1573.380591] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a3a114-fc9d-9a8c-e8a4-ac57659df58f" [ 1573.380591] env[63379]: _type = "Task" [ 1573.380591] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1573.384140] env[63379]: DEBUG oslo_vmware.api [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Task: {'id': task-1779454, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.394261] env[63379]: DEBUG oslo_vmware.api [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a3a114-fc9d-9a8c-e8a4-ac57659df58f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.406438] env[63379]: DEBUG oslo_vmware.api [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': task-1779452, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.719194} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1573.409199] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 6e022c9a-642b-4d96-8195-e56809bbd7b9/6e022c9a-642b-4d96-8195-e56809bbd7b9.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1573.409483] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1573.409877] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c405dbcb-f50e-4785-aadc-62046759988d tempest-VolumesAssistedSnapshotsTest-711968808 tempest-VolumesAssistedSnapshotsTest-711968808-project-admin] Acquiring lock "d47be684-6cd8-45c6-8c6a-9a6db0390f97" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1573.410676] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d883632d-d439-4c3a-ba74-f30602259671 tempest-VolumesAssistedSnapshotsTest-711968808 tempest-VolumesAssistedSnapshotsTest-711968808-project-admin] Lock "d47be684-6cd8-45c6-8c6a-9a6db0390f97" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.860s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1573.413954] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e7a5a6a7-3dfe-4417-beca-193dea60a0e0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.417065] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c405dbcb-f50e-4785-aadc-62046759988d tempest-VolumesAssistedSnapshotsTest-711968808 tempest-VolumesAssistedSnapshotsTest-711968808-project-admin] Lock "d47be684-6cd8-45c6-8c6a-9a6db0390f97" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.007s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1573.427421] env[63379]: DEBUG oslo_vmware.api [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Waiting for the task: (returnval){ [ 1573.427421] env[63379]: value = "task-1779455" [ 1573.427421] env[63379]: _type = "Task" [ 1573.427421] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1573.443297] env[63379]: DEBUG oslo_vmware.api [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': task-1779455, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.685460] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7426694-7125-47c1-8cf7-d30d3a9f6bc3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.695020] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d8274e6-3537-4982-af0c-f8de1ec3a5b1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.734579] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82e3cd46-2369-428f-930a-202f339f713a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.743135] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d80513b-4ac2-4458-963a-21826a2cd485 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.760561] env[63379]: DEBUG nova.compute.provider_tree [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1573.790680] env[63379]: DEBUG oslo_concurrency.lockutils [req-3e73b2e0-8c2b-484a-a99a-17c609dcf15b req-b6030f48-348d-4f35-8c09-be7b65d4fb45 service nova] Releasing lock "refresh_cache-f983d089-7cfc-46a5-8f8d-f49f67aef1da" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1573.815399] env[63379]: DEBUG oslo_concurrency.lockutils [None req-53ff7a10-5495-4cde-8110-cc0faccef8c7 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Releasing lock "refresh_cache-318355e9-b4cc-4645-ac51-b583d14e1134" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1573.815822] env[63379]: DEBUG nova.compute.manager [None req-53ff7a10-5495-4cde-8110-cc0faccef8c7 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1573.816028] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-53ff7a10-5495-4cde-8110-cc0faccef8c7 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1573.816352] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-75ebcd51-c6dd-4a53-a122-9421c5bf48ee {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.974229] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-588ff497-f4ec-427a-aedd-e7436eb92ee9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.974229] env[63379]: DEBUG nova.network.neutron [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Updating instance_info_cache with network_info: [{"id": "8e6b3d77-6a88-493c-9ef0-bae55a6dbbc3", "address": "fa:16:3e:01:39:f4", "network": {"id": "3a5c4f8e-5c7c-4623-90f8-f1b83e5b35f8", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-709139332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce15a519ec5744feb0731439b2534fc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e6b3d77-6a", "ovs_interfaceid": "8e6b3d77-6a88-493c-9ef0-bae55a6dbbc3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1573.974229] env[63379]: WARNING nova.virt.vmwareapi.vmops [None req-53ff7a10-5495-4cde-8110-cc0faccef8c7 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 318355e9-b4cc-4645-ac51-b583d14e1134 could not be found. 
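The WARNING record above shows the vmwareapi destroy path treating a VM that is already gone from the backend (nova.exception.InstanceNotFound) as non-fatal, so the subsequent records can still report "Instance destroyed" and move on to deallocating the network. The following is a minimal, self-contained Python sketch of that tolerate-missing-VM teardown pattern only; it is not Nova's actual code, and FakeBackend, terminate_instance and deallocate_network are hypothetical names introduced for illustration.

    import logging

    logging.basicConfig(level=logging.DEBUG, format="%(levelname)s %(name)s %(message)s")
    LOG = logging.getLogger("teardown-sketch")


    class InstanceNotFound(Exception):
        """Stand-in for nova.exception.InstanceNotFound (illustrative only)."""


    class FakeBackend:
        """Hypothetical hypervisor backend; raises if the VM is already gone."""

        def __init__(self, known):
            self._known = set(known)

        def destroy_vm(self, uuid):
            if uuid not in self._known:
                raise InstanceNotFound(f"Instance {uuid} could not be found.")
            self._known.remove(uuid)


    def terminate_instance(backend, uuid, deallocate_network):
        """Destroy the VM, tolerating an already-missing backend object,
        then always release the network resources."""
        try:
            backend.destroy_vm(uuid)
        except InstanceNotFound:
            # The guest is already gone on the hypervisor; log and continue so
            # network cleanup still happens, mirroring the WARNING in the log.
            LOG.warning("Instance does not exist on backend: %s", uuid)
        LOG.debug("Instance destroyed")
        deallocate_network(uuid)


    if __name__ == "__main__":
        backend = FakeBackend(known=[])  # VM already removed out-of-band
        terminate_instance(
            backend,
            "318355e9-b4cc-4645-ac51-b583d14e1134",
            deallocate_network=lambda u: LOG.debug(
                "Deallocating network for instance %s", u),
        )

Swallowing the not-found error at this point, rather than aborting, keeps network deallocation and the rest of the cleanup on the normal path, which is what the later "Deallocating network for instance" records below show.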
[ 1573.974229] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-53ff7a10-5495-4cde-8110-cc0faccef8c7 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1573.974229] env[63379]: INFO nova.compute.manager [None req-53ff7a10-5495-4cde-8110-cc0faccef8c7 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Took 0.09 seconds to destroy the instance on the hypervisor. [ 1573.974229] env[63379]: DEBUG oslo.service.loopingcall [None req-53ff7a10-5495-4cde-8110-cc0faccef8c7 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1573.974229] env[63379]: DEBUG nova.compute.manager [-] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1573.974229] env[63379]: DEBUG nova.network.neutron [-] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1573.974229] env[63379]: DEBUG oslo_vmware.api [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Task: {'id': task-1779454, 'name': ReconfigVM_Task, 'duration_secs': 0.179122} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1573.974229] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369286', 'volume_id': '084d5362-d8e9-4034-9623-555ed06a1add', 'name': 'volume-084d5362-d8e9-4034-9623-555ed06a1add', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6', 'attached_at': '', 'detached_at': '', 'volume_id': '084d5362-d8e9-4034-9623-555ed06a1add', 'serial': '084d5362-d8e9-4034-9623-555ed06a1add'} {{(pid=63379) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1573.974229] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1573.974229] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a060a02b-ccac-418d-bc8d-a026d8137bd3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.979863] env[63379]: INFO nova.compute.manager [None req-c405dbcb-f50e-4785-aadc-62046759988d tempest-VolumesAssistedSnapshotsTest-711968808 tempest-VolumesAssistedSnapshotsTest-711968808-project-admin] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Detaching volume 8de26acd-1826-4df7-ab2d-e7e3a910af4c [ 1573.979863] env[63379]: DEBUG oslo_vmware.api [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a3a114-fc9d-9a8c-e8a4-ac57659df58f, 'name': SearchDatastore_Task, 'duration_secs': 0.021285} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1573.979863] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1573.979863] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1573.979863] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1573.979863] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1573.979863] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1573.979863] env[63379]: DEBUG nova.network.neutron [-] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Instance cache missing network info. 
{{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1573.979863] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fb002dbc-50f2-434d-932f-34fb8c57b416 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.979863] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1573.979863] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-85253c9b-6e35-49e9-956e-0bd431d56a6c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.979863] env[63379]: DEBUG oslo_vmware.api [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': task-1779455, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.089653} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1573.979863] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1573.979863] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef88c37f-38b7-4da1-82ac-1afe474e478f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.979863] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1573.979863] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1573.981588] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93e90f22-3e95-4856-ab4f-0d1575a2adb7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.995335] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Reconfiguring VM instance instance-00000035 to attach disk [datastore1] 6e022c9a-642b-4d96-8195-e56809bbd7b9/6e022c9a-642b-4d96-8195-e56809bbd7b9.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1573.996771] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e1088a4d-3096-48b5-b553-a7ce9d1752d0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.014497] env[63379]: INFO nova.virt.block_device [None req-c405dbcb-f50e-4785-aadc-62046759988d tempest-VolumesAssistedSnapshotsTest-711968808 tempest-VolumesAssistedSnapshotsTest-711968808-project-admin] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Attempting to driver detach volume 8de26acd-1826-4df7-ab2d-e7e3a910af4c from mountpoint /dev/sdb [ 1574.014730] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-c405dbcb-f50e-4785-aadc-62046759988d tempest-VolumesAssistedSnapshotsTest-711968808 tempest-VolumesAssistedSnapshotsTest-711968808-project-admin] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Volume detach. Driver type: vmdk {{(pid=63379) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1574.014917] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-c405dbcb-f50e-4785-aadc-62046759988d tempest-VolumesAssistedSnapshotsTest-711968808 tempest-VolumesAssistedSnapshotsTest-711968808-project-admin] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369369', 'volume_id': '8de26acd-1826-4df7-ab2d-e7e3a910af4c', 'name': 'volume-8de26acd-1826-4df7-ab2d-e7e3a910af4c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd47be684-6cd8-45c6-8c6a-9a6db0390f97', 'attached_at': '', 'detached_at': '', 'volume_id': '8de26acd-1826-4df7-ab2d-e7e3a910af4c', 'serial': '8de26acd-1826-4df7-ab2d-e7e3a910af4c'} {{(pid=63379) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1574.015327] env[63379]: DEBUG oslo_vmware.api [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1574.015327] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5256d811-2707-e933-894a-56af4799341e" [ 1574.015327] env[63379]: _type = "Task" [ 1574.015327] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.016208] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0c07b62-0a58-466d-9fcc-c3d93999dd82 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.031060] env[63379]: DEBUG oslo_vmware.api [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Waiting for the task: (returnval){ [ 1574.031060] env[63379]: value = "task-1779457" [ 1574.031060] env[63379]: _type = "Task" [ 1574.031060] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.055783] env[63379]: DEBUG oslo_vmware.api [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5256d811-2707-e933-894a-56af4799341e, 'name': SearchDatastore_Task, 'duration_secs': 0.023886} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1574.061126] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a003370-74c5-4177-b991-45ad8361cd3b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.067053] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1574.067283] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1574.067461] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Deleting the datastore file [datastore1] a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1574.068703] env[63379]: DEBUG nova.compute.manager [req-b10a832f-a2c9-4a2d-b27d-7bf701b54e53 req-880f1c1c-0f1a-450c-b191-1677adb05002 service nova] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Received event network-vif-deleted-da9aa440-961a-44c6-95bd-7e4d31987617 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1574.068900] env[63379]: INFO nova.compute.manager [req-b10a832f-a2c9-4a2d-b27d-7bf701b54e53 req-880f1c1c-0f1a-450c-b191-1677adb05002 service nova] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Neutron deleted interface da9aa440-961a-44c6-95bd-7e4d31987617; detaching it from the instance and deleting it from the info cache [ 1574.069088] env[63379]: DEBUG nova.network.neutron [req-b10a832f-a2c9-4a2d-b27d-7bf701b54e53 req-880f1c1c-0f1a-450c-b191-1677adb05002 service 
nova] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1574.074668] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ec8224a-265b-407f-a39a-228ed74c4035 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.077163] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4c85f4a8-2088-4813-b821-85a7e76ba55d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.079317] env[63379]: DEBUG oslo_vmware.api [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': task-1779457, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.086282] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61688081-8824-400f-a40c-7ccf404bb751 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.089900] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3d772cdd-d35b-4aae-919a-d57f0bbd6820 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Acquiring lock "915aec20-5765-4aad-8b0f-f2d71b7d6428" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1574.090198] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3d772cdd-d35b-4aae-919a-d57f0bbd6820 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Lock "915aec20-5765-4aad-8b0f-f2d71b7d6428" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1574.090414] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3d772cdd-d35b-4aae-919a-d57f0bbd6820 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Acquiring lock "915aec20-5765-4aad-8b0f-f2d71b7d6428-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1574.090600] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3d772cdd-d35b-4aae-919a-d57f0bbd6820 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Lock "915aec20-5765-4aad-8b0f-f2d71b7d6428-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1574.090768] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3d772cdd-d35b-4aae-919a-d57f0bbd6820 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Lock 
"915aec20-5765-4aad-8b0f-f2d71b7d6428-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1574.092365] env[63379]: DEBUG oslo_vmware.api [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1574.092365] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5281a311-dda1-a322-8485-37e29edaf4f1" [ 1574.092365] env[63379]: _type = "Task" [ 1574.092365] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.098667] env[63379]: DEBUG oslo_vmware.api [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Waiting for the task: (returnval){ [ 1574.098667] env[63379]: value = "task-1779458" [ 1574.098667] env[63379]: _type = "Task" [ 1574.098667] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.099140] env[63379]: INFO nova.compute.manager [None req-3d772cdd-d35b-4aae-919a-d57f0bbd6820 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Terminating instance [ 1574.101067] env[63379]: DEBUG nova.compute.manager [None req-3d772cdd-d35b-4aae-919a-d57f0bbd6820 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1574.101278] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-3d772cdd-d35b-4aae-919a-d57f0bbd6820 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1574.106498] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b3bb429-8691-48cd-a771-3aa676e810a2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.134052] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-101fd95b-f4d2-4715-b6a6-8246dfcc9894 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.142220] env[63379]: DEBUG oslo_vmware.api [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5281a311-dda1-a322-8485-37e29edaf4f1, 'name': SearchDatastore_Task, 'duration_secs': 0.03076} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1574.142518] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d772cdd-d35b-4aae-919a-d57f0bbd6820 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1574.143623] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1574.143623] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] f983d089-7cfc-46a5-8f8d-f49f67aef1da/f983d089-7cfc-46a5-8f8d-f49f67aef1da.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1574.143872] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2c47f5a8-4fc0-4e5c-adaf-0f621f445101 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.163023] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-918578af-cfe4-489a-a86e-430d0cef39c8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.163411] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-c405dbcb-f50e-4785-aadc-62046759988d tempest-VolumesAssistedSnapshotsTest-711968808 tempest-VolumesAssistedSnapshotsTest-711968808-project-admin] The volume has not been displaced from its original location: [datastore1] volume-8de26acd-1826-4df7-ab2d-e7e3a910af4c/volume-8de26acd-1826-4df7-ab2d-e7e3a910af4c.vmdk. No consolidation needed. {{(pid=63379) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1574.168828] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-c405dbcb-f50e-4785-aadc-62046759988d tempest-VolumesAssistedSnapshotsTest-711968808 tempest-VolumesAssistedSnapshotsTest-711968808-project-admin] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Reconfiguring VM instance instance-0000000a to detach disk 2001 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1574.169686] env[63379]: DEBUG oslo_vmware.api [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Task: {'id': task-1779458, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.170646] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bc61c341-a242-45a6-b91f-c34dfcdec0e5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.185547] env[63379]: DEBUG oslo_vmware.api [None req-3d772cdd-d35b-4aae-919a-d57f0bbd6820 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Waiting for the task: (returnval){ [ 1574.185547] env[63379]: value = "task-1779459" [ 1574.185547] env[63379]: _type = "Task" [ 1574.185547] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.192911] env[63379]: DEBUG nova.network.neutron [-] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1574.195920] env[63379]: DEBUG oslo_vmware.api [None req-c405dbcb-f50e-4785-aadc-62046759988d tempest-VolumesAssistedSnapshotsTest-711968808 tempest-VolumesAssistedSnapshotsTest-711968808-project-admin] Waiting for the task: (returnval){ [ 1574.195920] env[63379]: value = "task-1779461" [ 1574.195920] env[63379]: _type = "Task" [ 1574.195920] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.197255] env[63379]: DEBUG oslo_vmware.api [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1574.197255] env[63379]: value = "task-1779460" [ 1574.197255] env[63379]: _type = "Task" [ 1574.197255] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.204467] env[63379]: DEBUG oslo_vmware.api [None req-3d772cdd-d35b-4aae-919a-d57f0bbd6820 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': task-1779459, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.217261] env[63379]: DEBUG oslo_vmware.api [None req-c405dbcb-f50e-4785-aadc-62046759988d tempest-VolumesAssistedSnapshotsTest-711968808 tempest-VolumesAssistedSnapshotsTest-711968808-project-admin] Task: {'id': task-1779461, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.223330] env[63379]: DEBUG oslo_vmware.api [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1779460, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.263835] env[63379]: DEBUG nova.scheduler.client.report [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1574.406506] env[63379]: DEBUG oslo_concurrency.lockutils [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Releasing lock "refresh_cache-1d2de9da-9dfe-42d2-b206-bb5139b1970b" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1574.448937] env[63379]: DEBUG nova.network.neutron [-] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1574.545201] env[63379]: DEBUG oslo_vmware.api [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': task-1779457, 'name': ReconfigVM_Task, 'duration_secs': 0.456385} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1574.545591] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Reconfigured VM instance instance-00000035 to attach disk [datastore1] 6e022c9a-642b-4d96-8195-e56809bbd7b9/6e022c9a-642b-4d96-8195-e56809bbd7b9.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1574.546297] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-09e5ca04-4486-4dee-865e-a67211dc386b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.554933] env[63379]: DEBUG oslo_vmware.api [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Waiting for the task: (returnval){ [ 1574.554933] env[63379]: value = "task-1779462" [ 1574.554933] env[63379]: _type = "Task" [ 1574.554933] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.566044] env[63379]: DEBUG oslo_vmware.api [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': task-1779462, 'name': Rename_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.573045] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3f8f9265-4ffa-42e0-84d4-2789b37ddbba {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.583665] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-658b3682-2670-4607-abf7-be3e06d5c3e4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.629608] env[63379]: DEBUG nova.compute.manager [req-b10a832f-a2c9-4a2d-b27d-7bf701b54e53 req-880f1c1c-0f1a-450c-b191-1677adb05002 service nova] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Detach interface failed, port_id=da9aa440-961a-44c6-95bd-7e4d31987617, reason: Instance aa44a4ff-14e5-42d2-a082-06fe0ae9646c could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 1574.633636] env[63379]: DEBUG oslo_vmware.api [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Task: {'id': task-1779458, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.166536} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1574.633985] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1574.634265] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1574.634515] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1574.698162] env[63379]: DEBUG oslo_vmware.api [None req-3d772cdd-d35b-4aae-919a-d57f0bbd6820 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': task-1779459, 'name': PowerOffVM_Task, 'duration_secs': 0.275637} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1574.699506] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Volume detach. 
Driver type: vmdk {{(pid=63379) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1574.703783] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d772cdd-d35b-4aae-919a-d57f0bbd6820 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1574.704061] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-3d772cdd-d35b-4aae-919a-d57f0bbd6820 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1574.704351] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bc2a5e2b-2902-4e67-8292-d1e05af89100 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.706651] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d1fccf49-3641-4fa0-95bd-df91ba9da3aa {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.708267] env[63379]: INFO nova.compute.manager [-] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Took 1.79 seconds to deallocate network for instance. [ 1574.720526] env[63379]: DEBUG oslo_vmware.api [None req-c405dbcb-f50e-4785-aadc-62046759988d tempest-VolumesAssistedSnapshotsTest-711968808 tempest-VolumesAssistedSnapshotsTest-711968808-project-admin] Task: {'id': task-1779461, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.725023] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-937863be-3c0c-4c07-86e3-e652ae65ce24 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.742350] env[63379]: DEBUG oslo_vmware.api [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1779460, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.776115] env[63379]: DEBUG oslo_concurrency.lockutils [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.651s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1574.776833] env[63379]: DEBUG nova.compute.manager [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1574.781474] env[63379]: ERROR nova.compute.manager [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Failed to detach volume 084d5362-d8e9-4034-9623-555ed06a1add from /dev/sda: nova.exception.InstanceNotFound: Instance a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6 could not be found. [ 1574.781474] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Traceback (most recent call last): [ 1574.781474] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] File "/opt/stack/nova/nova/compute/manager.py", line 4143, in _do_rebuild_instance [ 1574.781474] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] self.driver.rebuild(**kwargs) [ 1574.781474] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] File "/opt/stack/nova/nova/virt/driver.py", line 493, in rebuild [ 1574.781474] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] raise NotImplementedError() [ 1574.781474] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] NotImplementedError [ 1574.781474] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] [ 1574.781474] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] During handling of the above exception, another exception occurred: [ 1574.781474] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] [ 1574.781474] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Traceback (most recent call last): [ 1574.781474] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] File "/opt/stack/nova/nova/compute/manager.py", line 3566, in _detach_root_volume [ 1574.781474] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] self.driver.detach_volume(context, old_connection_info, [ 1574.781474] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 559, in detach_volume [ 1574.781474] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] return self._volumeops.detach_volume(connection_info, instance) [ 1574.781474] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1574.781474] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] self._detach_volume_vmdk(connection_info, instance) [ 1574.781474] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1574.781474] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1574.781474] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1574.781474] env[63379]: ERROR nova.compute.manager [instance: 
a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] stable_ref.fetch_moref(session) [ 1574.781474] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1574.781474] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1574.781474] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] nova.exception.InstanceNotFound: Instance a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6 could not be found. [ 1574.781474] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] [ 1574.784966] env[63379]: DEBUG oslo_concurrency.lockutils [None req-edcf2cbc-93f9-4d7e-b9b0-2fe9bf72c175 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.959s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1574.786078] env[63379]: DEBUG nova.objects.instance [None req-edcf2cbc-93f9-4d7e-b9b0-2fe9bf72c175 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Lazy-loading 'resources' on Instance uuid 650d4709-3cbc-4b9a-b165-66fa0af97c4d {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1574.815418] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-3d772cdd-d35b-4aae-919a-d57f0bbd6820 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1574.817113] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-3d772cdd-d35b-4aae-919a-d57f0bbd6820 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1574.817113] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d772cdd-d35b-4aae-919a-d57f0bbd6820 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Deleting the datastore file [datastore1] 915aec20-5765-4aad-8b0f-f2d71b7d6428 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1574.819268] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0088ae52-4363-4789-a638-8ed9f9556cc9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.829100] env[63379]: DEBUG oslo_vmware.api [None req-3d772cdd-d35b-4aae-919a-d57f0bbd6820 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Waiting for the task: (returnval){ [ 1574.829100] env[63379]: value = "task-1779464" [ 1574.829100] env[63379]: _type = "Task" [ 1574.829100] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.839643] env[63379]: DEBUG oslo_vmware.api [None req-3d772cdd-d35b-4aae-919a-d57f0bbd6820 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': task-1779464, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.938432] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1574.938432] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4f1daeb8-90d3-4533-9c55-fa10f4908b39 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.946843] env[63379]: DEBUG oslo_vmware.api [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1574.946843] env[63379]: value = "task-1779465" [ 1574.946843] env[63379]: _type = "Task" [ 1574.946843] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.957353] env[63379]: INFO nova.compute.manager [-] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Took 1.05 seconds to deallocate network for instance. [ 1574.957722] env[63379]: DEBUG oslo_vmware.api [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779465, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.972638] env[63379]: DEBUG nova.compute.utils [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Build of instance a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6 aborted: Failed to rebuild volume backed instance. {{(pid=63379) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1574.975111] env[63379]: ERROR nova.compute.manager [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6 aborted: Failed to rebuild volume backed instance. 
[ 1574.975111] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Traceback (most recent call last): [ 1574.975111] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] File "/opt/stack/nova/nova/compute/manager.py", line 4143, in _do_rebuild_instance [ 1574.975111] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] self.driver.rebuild(**kwargs) [ 1574.975111] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] File "/opt/stack/nova/nova/virt/driver.py", line 493, in rebuild [ 1574.975111] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] raise NotImplementedError() [ 1574.975111] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] NotImplementedError [ 1574.975111] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] [ 1574.975111] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] During handling of the above exception, another exception occurred: [ 1574.975111] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] [ 1574.975111] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Traceback (most recent call last): [ 1574.975111] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] File "/opt/stack/nova/nova/compute/manager.py", line 3601, in _rebuild_volume_backed_instance [ 1574.975111] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] self._detach_root_volume(context, instance, root_bdm) [ 1574.975111] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] File "/opt/stack/nova/nova/compute/manager.py", line 3580, in _detach_root_volume [ 1574.975111] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] with excutils.save_and_reraise_exception(): [ 1574.975111] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1574.975111] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] self.force_reraise() [ 1574.975111] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1574.975111] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] raise self.value [ 1574.975111] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] File "/opt/stack/nova/nova/compute/manager.py", line 3566, in _detach_root_volume [ 1574.975111] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] self.driver.detach_volume(context, old_connection_info, [ 1574.975111] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 559, in detach_volume [ 1574.975111] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] return self._volumeops.detach_volume(connection_info, instance) [ 1574.975111] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] File 
"/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1574.975111] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] self._detach_volume_vmdk(connection_info, instance) [ 1574.975111] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1574.975111] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1574.975111] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1574.975111] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] stable_ref.fetch_moref(session) [ 1574.975111] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1574.975111] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1574.975111] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] nova.exception.InstanceNotFound: Instance a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6 could not be found. [ 1574.975111] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] [ 1574.975111] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] During handling of the above exception, another exception occurred: [ 1574.975111] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] [ 1574.975111] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Traceback (most recent call last): [ 1574.975111] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] File "/opt/stack/nova/nova/compute/manager.py", line 10911, in _error_out_instance_on_exception [ 1574.975111] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] yield [ 1574.975111] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] File "/opt/stack/nova/nova/compute/manager.py", line 3869, in rebuild_instance [ 1574.975111] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] self._do_rebuild_instance_with_claim( [ 1574.981281] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] File "/opt/stack/nova/nova/compute/manager.py", line 3955, in _do_rebuild_instance_with_claim [ 1574.981281] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] self._do_rebuild_instance( [ 1574.981281] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] File "/opt/stack/nova/nova/compute/manager.py", line 4147, in _do_rebuild_instance [ 1574.981281] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] self._rebuild_default_impl(**kwargs) [ 1574.981281] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] File "/opt/stack/nova/nova/compute/manager.py", line 3724, in _rebuild_default_impl [ 1574.981281] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] 
self._rebuild_volume_backed_instance( [ 1574.981281] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] File "/opt/stack/nova/nova/compute/manager.py", line 3616, in _rebuild_volume_backed_instance [ 1574.981281] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] raise exception.BuildAbortException( [ 1574.981281] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] nova.exception.BuildAbortException: Build of instance a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6 aborted: Failed to rebuild volume backed instance. [ 1574.981281] env[63379]: ERROR nova.compute.manager [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] [ 1575.066525] env[63379]: DEBUG oslo_vmware.api [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': task-1779462, 'name': Rename_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.208830] env[63379]: DEBUG oslo_vmware.api [None req-c405dbcb-f50e-4785-aadc-62046759988d tempest-VolumesAssistedSnapshotsTest-711968808 tempest-VolumesAssistedSnapshotsTest-711968808-project-admin] Task: {'id': task-1779461, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.217810] env[63379]: DEBUG oslo_vmware.api [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1779460, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.649992} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1575.218089] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] f983d089-7cfc-46a5-8f8d-f49f67aef1da/f983d089-7cfc-46a5-8f8d-f49f67aef1da.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1575.218389] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1575.218803] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d35c2c2c-e2c9-4ac9-b676-553600be7d2b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.223143] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e2a8e779-188f-490a-b98a-05a17f960434 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1575.226696] env[63379]: DEBUG oslo_vmware.api [None 
req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1575.226696] env[63379]: value = "task-1779466" [ 1575.226696] env[63379]: _type = "Task" [ 1575.226696] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1575.235566] env[63379]: DEBUG oslo_vmware.api [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1779466, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.287114] env[63379]: DEBUG nova.compute.utils [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1575.291604] env[63379]: DEBUG nova.compute.manager [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1575.291787] env[63379]: DEBUG nova.network.neutron [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1575.336888] env[63379]: DEBUG nova.policy [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '58a15ee55d144311ab7f0a572416da0f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ea2c1f9216ee4d8e8349a27de543c2d5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1575.344984] env[63379]: DEBUG oslo_vmware.api [None req-3d772cdd-d35b-4aae-919a-d57f0bbd6820 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Task: {'id': task-1779464, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.24264} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1575.345484] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d772cdd-d35b-4aae-919a-d57f0bbd6820 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1575.345717] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-3d772cdd-d35b-4aae-919a-d57f0bbd6820 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1575.346698] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-3d772cdd-d35b-4aae-919a-d57f0bbd6820 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1575.346698] env[63379]: INFO nova.compute.manager [None req-3d772cdd-d35b-4aae-919a-d57f0bbd6820 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Took 1.24 seconds to destroy the instance on the hypervisor. [ 1575.346698] env[63379]: DEBUG oslo.service.loopingcall [None req-3d772cdd-d35b-4aae-919a-d57f0bbd6820 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1575.346698] env[63379]: DEBUG nova.compute.manager [-] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1575.346698] env[63379]: DEBUG nova.network.neutron [-] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1575.468801] env[63379]: DEBUG oslo_vmware.api [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779465, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.471387] env[63379]: INFO nova.compute.manager [None req-53ff7a10-5495-4cde-8110-cc0faccef8c7 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Instance disappeared during terminate [ 1575.471387] env[63379]: DEBUG oslo_concurrency.lockutils [None req-53ff7a10-5495-4cde-8110-cc0faccef8c7 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Lock "318355e9-b4cc-4645-ac51-b583d14e1134" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.803s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1575.575862] env[63379]: DEBUG oslo_vmware.api [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': task-1779462, 'name': Rename_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.721195] env[63379]: DEBUG oslo_vmware.api [None req-c405dbcb-f50e-4785-aadc-62046759988d tempest-VolumesAssistedSnapshotsTest-711968808 tempest-VolumesAssistedSnapshotsTest-711968808-project-admin] Task: {'id': task-1779461, 'name': ReconfigVM_Task, 'duration_secs': 1.298277} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1575.721195] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-c405dbcb-f50e-4785-aadc-62046759988d tempest-VolumesAssistedSnapshotsTest-711968808 tempest-VolumesAssistedSnapshotsTest-711968808-project-admin] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Reconfigured VM instance instance-0000000a to detach disk 2001 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1575.727722] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-68caa9fd-a1cf-41f6-8715-7c1bb8c07019 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.752449] env[63379]: DEBUG oslo_vmware.api [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1779466, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.350177} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1575.755051] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1575.755705] env[63379]: DEBUG oslo_vmware.api [None req-c405dbcb-f50e-4785-aadc-62046759988d tempest-VolumesAssistedSnapshotsTest-711968808 tempest-VolumesAssistedSnapshotsTest-711968808-project-admin] Waiting for the task: (returnval){ [ 1575.755705] env[63379]: value = "task-1779467" [ 1575.755705] env[63379]: _type = "Task" [ 1575.755705] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1575.760187] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b292189-2fa6-4c91-9814-91e046c38921 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.774354] env[63379]: DEBUG oslo_vmware.api [None req-c405dbcb-f50e-4785-aadc-62046759988d tempest-VolumesAssistedSnapshotsTest-711968808 tempest-VolumesAssistedSnapshotsTest-711968808-project-admin] Task: {'id': task-1779467, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.792889] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Reconfiguring VM instance instance-00000036 to attach disk [datastore1] f983d089-7cfc-46a5-8f8d-f49f67aef1da/f983d089-7cfc-46a5-8f8d-f49f67aef1da.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1575.796092] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-92514b5a-7edc-4f6f-8f03-b0dcb35ad6c4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.810879] env[63379]: DEBUG nova.compute.utils [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1575.820758] env[63379]: DEBUG oslo_vmware.api [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1575.820758] env[63379]: value = "task-1779468" [ 1575.820758] env[63379]: _type = "Task" [ 1575.820758] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1575.830111] env[63379]: DEBUG oslo_vmware.api [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1779468, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.902218] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b0eeba8-00fd-4e7d-bfee-e7030f99811f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.910362] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a3dc84f-5ceb-42f9-9b00-59cdb089e3b8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.917030] env[63379]: DEBUG nova.objects.instance [None req-8879d40e-0ee4-442a-9ae2-d4b73ed6cb59 tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Lazy-loading 'flavor' on Instance uuid aedff32b-b0c2-4a93-a2c6-349d26839cc4 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1575.954564] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7452dc47-714b-4529-8add-3ed3d77d9062 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.968381] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b54eb588-214c-4c0c-8dac-7a1a525a6eb5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.973773] env[63379]: DEBUG oslo_vmware.api [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779465, 'name': PowerOffVM_Task, 'duration_secs': 0.738754} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1575.974378] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1575.975594] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-023cc524-5030-4bab-a070-50556cc28463 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.988676] env[63379]: DEBUG nova.compute.provider_tree [None req-edcf2cbc-93f9-4d7e-b9b0-2fe9bf72c175 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1576.005646] env[63379]: DEBUG nova.scheduler.client.report [None req-edcf2cbc-93f9-4d7e-b9b0-2fe9bf72c175 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1576.009752] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c4ecca9-60f8-44ca-b312-4eae6b055d93 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.013911] env[63379]: DEBUG nova.network.neutron [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Successfully created port: a8926575-6550-43c6-b23d-a15787ee76c0 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1576.056295] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1576.056295] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a41a1d38-0253-4d68-b4db-d9732333161a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.069925] env[63379]: DEBUG oslo_vmware.api [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': task-1779462, 'name': Rename_Task, 'duration_secs': 1.076265} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1576.069925] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1576.069925] env[63379]: DEBUG oslo_vmware.api [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1576.069925] env[63379]: value = "task-1779469" [ 1576.069925] env[63379]: _type = "Task" [ 1576.069925] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.070226] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3897dc3c-9962-4a4b-8c51-4ae7f72642df {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.085411] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] VM already powered off {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1576.085702] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1576.085956] env[63379]: DEBUG oslo_concurrency.lockutils [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1576.086124] env[63379]: DEBUG oslo_concurrency.lockutils [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1576.086309] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1576.086654] env[63379]: DEBUG oslo_vmware.api [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Waiting for the task: (returnval){ [ 1576.086654] env[63379]: value = "task-1779470" [ 1576.086654] 
env[63379]: _type = "Task" [ 1576.086654] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.086889] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-03413a16-9bf9-47bf-b451-5e910c7bf0b0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.099202] env[63379]: DEBUG oslo_vmware.api [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': task-1779470, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.106638] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1576.106638] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1576.106899] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-68184e60-36e5-419a-967d-f897204c7b39 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.114309] env[63379]: DEBUG oslo_vmware.api [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1576.114309] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52bbba9b-c085-b6c5-f55e-dabbb5bffb6e" [ 1576.114309] env[63379]: _type = "Task" [ 1576.114309] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.125824] env[63379]: DEBUG oslo_vmware.api [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52bbba9b-c085-b6c5-f55e-dabbb5bffb6e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.188163] env[63379]: DEBUG nova.compute.manager [req-dd2b6059-acb3-4369-9109-9e3dbace16a1 req-7e00629b-5172-4857-8611-6f236664fb0c service nova] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Received event network-vif-deleted-01134024-43f6-41eb-b222-1e69cef1bfd4 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1576.188462] env[63379]: INFO nova.compute.manager [req-dd2b6059-acb3-4369-9109-9e3dbace16a1 req-7e00629b-5172-4857-8611-6f236664fb0c service nova] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Neutron deleted interface 01134024-43f6-41eb-b222-1e69cef1bfd4; detaching it from the instance and deleting it from the info cache [ 1576.188713] env[63379]: DEBUG nova.network.neutron [req-dd2b6059-acb3-4369-9109-9e3dbace16a1 req-7e00629b-5172-4857-8611-6f236664fb0c service nova] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1576.274910] env[63379]: DEBUG oslo_vmware.api [None req-c405dbcb-f50e-4785-aadc-62046759988d tempest-VolumesAssistedSnapshotsTest-711968808 tempest-VolumesAssistedSnapshotsTest-711968808-project-admin] Task: {'id': task-1779467, 'name': ReconfigVM_Task, 'duration_secs': 0.212436} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1576.275054] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-c405dbcb-f50e-4785-aadc-62046759988d tempest-VolumesAssistedSnapshotsTest-711968808 tempest-VolumesAssistedSnapshotsTest-711968808-project-admin] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369369', 'volume_id': '8de26acd-1826-4df7-ab2d-e7e3a910af4c', 'name': 'volume-8de26acd-1826-4df7-ab2d-e7e3a910af4c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd47be684-6cd8-45c6-8c6a-9a6db0390f97', 'attached_at': '', 'detached_at': '', 'volume_id': '8de26acd-1826-4df7-ab2d-e7e3a910af4c', 'serial': '8de26acd-1826-4df7-ab2d-e7e3a910af4c'} {{(pid=63379) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1576.314296] env[63379]: DEBUG nova.compute.manager [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1576.331816] env[63379]: DEBUG oslo_vmware.api [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1779468, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.347822] env[63379]: DEBUG nova.network.neutron [-] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1576.425444] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8879d40e-0ee4-442a-9ae2-d4b73ed6cb59 tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Acquiring lock "refresh_cache-aedff32b-b0c2-4a93-a2c6-349d26839cc4" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1576.425444] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8879d40e-0ee4-442a-9ae2-d4b73ed6cb59 tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Acquired lock "refresh_cache-aedff32b-b0c2-4a93-a2c6-349d26839cc4" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1576.514526] env[63379]: DEBUG oslo_concurrency.lockutils [None req-edcf2cbc-93f9-4d7e-b9b0-2fe9bf72c175 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.729s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1576.517383] env[63379]: DEBUG oslo_concurrency.lockutils [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.329s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1576.518982] env[63379]: INFO nova.compute.claims [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1576.566210] env[63379]: INFO nova.scheduler.client.report [None req-edcf2cbc-93f9-4d7e-b9b0-2fe9bf72c175 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Deleted allocations for instance 650d4709-3cbc-4b9a-b165-66fa0af97c4d [ 1576.602338] env[63379]: DEBUG oslo_vmware.api [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': task-1779470, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.626417] env[63379]: DEBUG oslo_vmware.api [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52bbba9b-c085-b6c5-f55e-dabbb5bffb6e, 'name': SearchDatastore_Task, 'duration_secs': 0.031121} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1576.627337] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd4ca78f-b4ce-4a71-bf9d-a3495e97b31b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.634779] env[63379]: DEBUG oslo_vmware.api [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1576.634779] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]528fd040-54c7-1963-44e5-ac1f87877069" [ 1576.634779] env[63379]: _type = "Task" [ 1576.634779] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.645226] env[63379]: DEBUG oslo_vmware.api [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]528fd040-54c7-1963-44e5-ac1f87877069, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.692651] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-16f14fbb-aa08-4735-9879-5b775e1ad36e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.702166] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3324399-4906-4e95-baef-db628f29ed95 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.747482] env[63379]: DEBUG nova.compute.manager [req-dd2b6059-acb3-4369-9109-9e3dbace16a1 req-7e00629b-5172-4857-8611-6f236664fb0c service nova] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Detach interface failed, port_id=01134024-43f6-41eb-b222-1e69cef1bfd4, reason: Instance 915aec20-5765-4aad-8b0f-f2d71b7d6428 could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 1576.758518] env[63379]: DEBUG oslo_vmware.rw_handles [None req-c2acd61a-6cc1-44ac-8f75-e5680994567e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526227a5-1462-4857-0593-f1d04b581ad6/disk-0.vmdk. {{(pid=63379) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1576.759563] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05ae7f9c-b5f8-4ffb-b89a-ca26a41b52d1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.766775] env[63379]: DEBUG oslo_vmware.rw_handles [None req-c2acd61a-6cc1-44ac-8f75-e5680994567e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526227a5-1462-4857-0593-f1d04b581ad6/disk-0.vmdk is in state: ready. 
{{(pid=63379) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1576.766963] env[63379]: ERROR oslo_vmware.rw_handles [None req-c2acd61a-6cc1-44ac-8f75-e5680994567e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526227a5-1462-4857-0593-f1d04b581ad6/disk-0.vmdk due to incomplete transfer. [ 1576.767173] env[63379]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-ffeb6458-cbb4-4961-925c-233570077ea6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.775514] env[63379]: DEBUG oslo_vmware.rw_handles [None req-c2acd61a-6cc1-44ac-8f75-e5680994567e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526227a5-1462-4857-0593-f1d04b581ad6/disk-0.vmdk. {{(pid=63379) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1576.775768] env[63379]: DEBUG nova.virt.vmwareapi.images [None req-c2acd61a-6cc1-44ac-8f75-e5680994567e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Uploaded image c9c544a4-5a35-4c31-896a-05c58c561419 to the Glance image server {{(pid=63379) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1576.778113] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2acd61a-6cc1-44ac-8f75-e5680994567e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Destroying the VM {{(pid=63379) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1576.778407] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-e8824fd9-5dfa-4beb-a040-26390d00d5e8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.785379] env[63379]: DEBUG oslo_vmware.api [None req-c2acd61a-6cc1-44ac-8f75-e5680994567e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1576.785379] env[63379]: value = "task-1779471" [ 1576.785379] env[63379]: _type = "Task" [ 1576.785379] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.794841] env[63379]: DEBUG oslo_vmware.api [None req-c2acd61a-6cc1-44ac-8f75-e5680994567e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779471, 'name': Destroy_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.835168] env[63379]: DEBUG oslo_vmware.api [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1779468, 'name': ReconfigVM_Task, 'duration_secs': 0.718524} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1576.835168] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Reconfigured VM instance instance-00000036 to attach disk [datastore1] f983d089-7cfc-46a5-8f8d-f49f67aef1da/f983d089-7cfc-46a5-8f8d-f49f67aef1da.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1576.835168] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-409fb87f-9517-464b-a970-079ede30f8b0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.842234] env[63379]: DEBUG oslo_vmware.api [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1576.842234] env[63379]: value = "task-1779472" [ 1576.842234] env[63379]: _type = "Task" [ 1576.842234] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.847280] env[63379]: DEBUG nova.objects.instance [None req-c405dbcb-f50e-4785-aadc-62046759988d tempest-VolumesAssistedSnapshotsTest-711968808 tempest-VolumesAssistedSnapshotsTest-711968808-project-admin] Lazy-loading 'flavor' on Instance uuid d47be684-6cd8-45c6-8c6a-9a6db0390f97 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1576.852366] env[63379]: INFO nova.compute.manager [-] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Took 1.51 seconds to deallocate network for instance. [ 1576.852801] env[63379]: DEBUG oslo_vmware.api [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1779472, 'name': Rename_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.996305] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1577.016228] env[63379]: DEBUG nova.network.neutron [None req-8879d40e-0ee4-442a-9ae2-d4b73ed6cb59 tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1577.072661] env[63379]: DEBUG oslo_concurrency.lockutils [None req-edcf2cbc-93f9-4d7e-b9b0-2fe9bf72c175 tempest-ImagesNegativeTestJSON-1319031571 tempest-ImagesNegativeTestJSON-1319031571-project-member] Lock "650d4709-3cbc-4b9a-b165-66fa0af97c4d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.548s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1577.101409] env[63379]: DEBUG oslo_vmware.api [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': task-1779470, 'name': PowerOnVM_Task, 'duration_secs': 0.693203} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1577.101409] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1577.101864] env[63379]: INFO nova.compute.manager [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Took 10.23 seconds to spawn the instance on the hypervisor. [ 1577.103936] env[63379]: DEBUG nova.compute.manager [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1577.103936] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd1dcbac-8798-485b-b4a7-d552e3c0a7c9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.145055] env[63379]: DEBUG oslo_vmware.api [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]528fd040-54c7-1963-44e5-ac1f87877069, 'name': SearchDatastore_Task, 'duration_secs': 0.014451} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1577.145386] env[63379]: DEBUG oslo_concurrency.lockutils [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1577.145662] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 1d2de9da-9dfe-42d2-b206-bb5139b1970b/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48-rescue.vmdk. {{(pid=63379) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1577.146615] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d4a5edf8-7e64-4835-969b-6708b8d7f5d6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.154999] env[63379]: DEBUG oslo_vmware.api [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1577.154999] env[63379]: value = "task-1779473" [ 1577.154999] env[63379]: _type = "Task" [ 1577.154999] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1577.163139] env[63379]: DEBUG oslo_vmware.api [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779473, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.295653] env[63379]: DEBUG oslo_vmware.api [None req-c2acd61a-6cc1-44ac-8f75-e5680994567e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779471, 'name': Destroy_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.327091] env[63379]: DEBUG nova.compute.manager [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1577.355659] env[63379]: DEBUG oslo_vmware.api [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1779472, 'name': Rename_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.358017] env[63379]: DEBUG nova.virt.hardware [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:28:22Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=1,extra_specs={hw_rng:allowed='True'},flavorid='1491891109',id=21,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_1-404102141',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1577.358017] env[63379]: DEBUG nova.virt.hardware [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1577.358186] env[63379]: DEBUG nova.virt.hardware [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1577.358366] env[63379]: DEBUG nova.virt.hardware [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1577.358517] env[63379]: DEBUG nova.virt.hardware [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1577.358669] env[63379]: DEBUG nova.virt.hardware [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1577.358878] env[63379]: DEBUG nova.virt.hardware [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1577.359055] env[63379]: DEBUG nova.virt.hardware [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 
tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1577.359231] env[63379]: DEBUG nova.virt.hardware [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1577.359400] env[63379]: DEBUG nova.virt.hardware [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1577.360052] env[63379]: DEBUG nova.virt.hardware [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1577.361996] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3d772cdd-d35b-4aae-919a-d57f0bbd6820 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1577.362846] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-518786da-6856-4351-893e-712a120bc92c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.373686] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-884726d4-c657-456a-b36a-af8f32552813 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.624838] env[63379]: INFO nova.compute.manager [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Took 41.79 seconds to build instance. [ 1577.676107] env[63379]: DEBUG oslo_vmware.api [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779473, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.807094] env[63379]: DEBUG oslo_vmware.api [None req-c2acd61a-6cc1-44ac-8f75-e5680994567e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779471, 'name': Destroy_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.807731] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ef7f56e7-6b54-4e9f-b59f-290b32f364eb tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Acquiring lock "a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1577.809193] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ef7f56e7-6b54-4e9f-b59f-290b32f364eb tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Lock "a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1577.809193] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ef7f56e7-6b54-4e9f-b59f-290b32f364eb tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Acquiring lock "a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1577.809193] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ef7f56e7-6b54-4e9f-b59f-290b32f364eb tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Lock "a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1577.809193] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ef7f56e7-6b54-4e9f-b59f-290b32f364eb tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Lock "a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1577.810987] env[63379]: INFO nova.compute.manager [None req-ef7f56e7-6b54-4e9f-b59f-290b32f364eb tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Terminating instance [ 1577.815632] env[63379]: DEBUG nova.compute.manager [None req-ef7f56e7-6b54-4e9f-b59f-290b32f364eb tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1577.816356] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d98d7f33-28e2-4af3-ac46-ee81c2c4a54d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.828558] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32056694-cad9-496b-b9a2-11754788f154 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.874036] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c405dbcb-f50e-4785-aadc-62046759988d tempest-VolumesAssistedSnapshotsTest-711968808 tempest-VolumesAssistedSnapshotsTest-711968808-project-admin] Lock "d47be684-6cd8-45c6-8c6a-9a6db0390f97" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.456s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1577.876187] env[63379]: WARNING nova.virt.vmwareapi.driver [None req-ef7f56e7-6b54-4e9f-b59f-290b32f364eb tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6 could not be found. [ 1577.876187] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-ef7f56e7-6b54-4e9f-b59f-290b32f364eb tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1577.882563] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5e27d2d1-80ab-4de1-833a-ab366db7b981 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.885055] env[63379]: DEBUG oslo_vmware.api [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1779472, 'name': Rename_Task, 'duration_secs': 0.876295} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1577.886074] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1577.886770] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1af3caff-37f1-4036-b02a-ef9e3a8b2f1f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.897305] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a08c2a15-1c84-4971-b395-9b35b170f992 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.917222] env[63379]: DEBUG oslo_vmware.api [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1577.917222] env[63379]: value = "task-1779475" [ 1577.917222] env[63379]: _type = "Task" [ 1577.917222] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1577.935063] env[63379]: DEBUG oslo_vmware.api [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1779475, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.954294] env[63379]: WARNING nova.virt.vmwareapi.vmops [None req-ef7f56e7-6b54-4e9f-b59f-290b32f364eb tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6 could not be found. [ 1577.954622] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-ef7f56e7-6b54-4e9f-b59f-290b32f364eb tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1577.954800] env[63379]: INFO nova.compute.manager [None req-ef7f56e7-6b54-4e9f-b59f-290b32f364eb tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Took 0.14 seconds to destroy the instance on the hypervisor. [ 1577.955106] env[63379]: DEBUG oslo.service.loopingcall [None req-ef7f56e7-6b54-4e9f-b59f-290b32f364eb tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1577.958192] env[63379]: DEBUG nova.compute.manager [-] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1577.958326] env[63379]: DEBUG nova.network.neutron [-] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1578.127298] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c954bc89-eb12-4f56-a604-ad8d5c731fe9 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Lock "6e022c9a-642b-4d96-8195-e56809bbd7b9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 82.549s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1578.128665] env[63379]: DEBUG nova.network.neutron [None req-8879d40e-0ee4-442a-9ae2-d4b73ed6cb59 tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Updating instance_info_cache with network_info: [{"id": "d994b910-f078-4d71-a9e5-f3177a54dfef", "address": "fa:16:3e:45:dc:63", "network": {"id": "6f6e9d87-2ff1-4f28-8e25-27bb57119d5d", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-455291752-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.171", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7fadf3c8628840efb6c8f6f99df21694", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c68b7663-4f0e-47f0-ac7f-40c6d952f7bb", "external-id": "nsx-vlan-transportzone-696", "segmentation_id": 696, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd994b910-f0", "ovs_interfaceid": "d994b910-f078-4d71-a9e5-f3177a54dfef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1578.139368] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10b65064-af23-43e5-8450-35b977e3a5d8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.149612] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd8c2bae-0451-4b38-9ab5-9ceff0e04977 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.193759] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-366bd8c7-71f0-41ba-bc55-febc9df92dc6 {{(pid=63379) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.196715] env[63379]: DEBUG oslo_vmware.api [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779473, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.935728} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1578.197518] env[63379]: INFO nova.virt.vmwareapi.ds_util [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 1d2de9da-9dfe-42d2-b206-bb5139b1970b/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48-rescue.vmdk. [ 1578.198574] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-580075e4-f11e-47b8-9f76-6aaf8b8b6277 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.205380] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45bbd2db-ebc9-48e7-a226-68f678e2d8bf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.231433] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Reconfiguring VM instance instance-00000031 to attach disk [datastore1] 1d2de9da-9dfe-42d2-b206-bb5139b1970b/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48-rescue.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1578.232251] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b82ea1fd-1594-4c2a-81d3-1f11fc9d5c5a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.254489] env[63379]: DEBUG nova.compute.provider_tree [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1578.262976] env[63379]: DEBUG oslo_vmware.api [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1578.262976] env[63379]: value = "task-1779476" [ 1578.262976] env[63379]: _type = "Task" [ 1578.262976] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1578.272648] env[63379]: DEBUG oslo_vmware.api [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779476, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.278724] env[63379]: DEBUG nova.network.neutron [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Successfully updated port: a8926575-6550-43c6-b23d-a15787ee76c0 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1578.299359] env[63379]: DEBUG oslo_vmware.api [None req-c2acd61a-6cc1-44ac-8f75-e5680994567e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779471, 'name': Destroy_Task} progress is 100%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.434247] env[63379]: DEBUG oslo_vmware.api [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1779475, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.470163] env[63379]: DEBUG nova.compute.manager [req-18e31967-28af-452e-aadf-c98966f6d253 req-9161f245-f86a-4d3b-911b-44139c01ab1d service nova] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Received event network-changed-d994b910-f078-4d71-a9e5-f3177a54dfef {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1578.470293] env[63379]: DEBUG nova.compute.manager [req-18e31967-28af-452e-aadf-c98966f6d253 req-9161f245-f86a-4d3b-911b-44139c01ab1d service nova] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Refreshing instance network info cache due to event network-changed-d994b910-f078-4d71-a9e5-f3177a54dfef. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1578.470862] env[63379]: DEBUG oslo_concurrency.lockutils [req-18e31967-28af-452e-aadf-c98966f6d253 req-9161f245-f86a-4d3b-911b-44139c01ab1d service nova] Acquiring lock "refresh_cache-aedff32b-b0c2-4a93-a2c6-349d26839cc4" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1578.633976] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8879d40e-0ee4-442a-9ae2-d4b73ed6cb59 tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Releasing lock "refresh_cache-aedff32b-b0c2-4a93-a2c6-349d26839cc4" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1578.634155] env[63379]: DEBUG nova.compute.manager [None req-8879d40e-0ee4-442a-9ae2-d4b73ed6cb59 tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Inject network info {{(pid=63379) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7265}} [ 1578.636022] env[63379]: DEBUG nova.compute.manager [None req-8879d40e-0ee4-442a-9ae2-d4b73ed6cb59 tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] network_info to inject: |[{"id": "d994b910-f078-4d71-a9e5-f3177a54dfef", "address": "fa:16:3e:45:dc:63", "network": {"id": "6f6e9d87-2ff1-4f28-8e25-27bb57119d5d", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-455291752-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.171", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7fadf3c8628840efb6c8f6f99df21694", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c68b7663-4f0e-47f0-ac7f-40c6d952f7bb", "external-id": "nsx-vlan-transportzone-696", "segmentation_id": 696, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd994b910-f0", "ovs_interfaceid": "d994b910-f078-4d71-a9e5-f3177a54dfef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7266}} [ 1578.642233] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8879d40e-0ee4-442a-9ae2-d4b73ed6cb59 tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Reconfiguring VM instance to set the machine id {{(pid=63379) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1578.642773] env[63379]: DEBUG oslo_concurrency.lockutils [req-18e31967-28af-452e-aadf-c98966f6d253 req-9161f245-f86a-4d3b-911b-44139c01ab1d service nova] Acquired lock "refresh_cache-aedff32b-b0c2-4a93-a2c6-349d26839cc4" {{(pid=63379) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1578.643071] env[63379]: DEBUG nova.network.neutron [req-18e31967-28af-452e-aadf-c98966f6d253 req-9161f245-f86a-4d3b-911b-44139c01ab1d service nova] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Refreshing network info cache for port d994b910-f078-4d71-a9e5-f3177a54dfef {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1578.644487] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6747d9de-0279-4a5f-8087-bf2a97fffc4c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.662963] env[63379]: DEBUG oslo_vmware.api [None req-8879d40e-0ee4-442a-9ae2-d4b73ed6cb59 tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Waiting for the task: (returnval){ [ 1578.662963] env[63379]: value = "task-1779477" [ 1578.662963] env[63379]: _type = "Task" [ 1578.662963] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1578.673526] env[63379]: DEBUG oslo_vmware.api [None req-8879d40e-0ee4-442a-9ae2-d4b73ed6cb59 tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Task: {'id': task-1779477, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.758705] env[63379]: DEBUG nova.scheduler.client.report [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1578.776273] env[63379]: DEBUG oslo_vmware.api [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779476, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.781718] env[63379]: DEBUG oslo_concurrency.lockutils [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Acquiring lock "refresh_cache-38be0e8d-188b-4a98-aedc-5d941b63c000" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1578.781838] env[63379]: DEBUG oslo_concurrency.lockutils [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Acquired lock "refresh_cache-38be0e8d-188b-4a98-aedc-5d941b63c000" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1578.781968] env[63379]: DEBUG nova.network.neutron [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1578.801618] env[63379]: DEBUG oslo_vmware.api [None req-c2acd61a-6cc1-44ac-8f75-e5680994567e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779471, 'name': Destroy_Task, 'duration_secs': 1.530232} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1578.801866] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-c2acd61a-6cc1-44ac-8f75-e5680994567e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Destroyed the VM [ 1578.802180] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c2acd61a-6cc1-44ac-8f75-e5680994567e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Deleting Snapshot of the VM instance {{(pid=63379) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1578.802452] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-70fc2e87-1460-42da-9660-4eee5c0bd0ad {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.811098] env[63379]: DEBUG oslo_vmware.api [None req-c2acd61a-6cc1-44ac-8f75-e5680994567e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1578.811098] env[63379]: value = "task-1779478" [ 1578.811098] env[63379]: _type = "Task" [ 1578.811098] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1578.821615] env[63379]: DEBUG oslo_vmware.api [None req-c2acd61a-6cc1-44ac-8f75-e5680994567e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779478, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.934645] env[63379]: DEBUG oslo_vmware.api [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1779475, 'name': PowerOnVM_Task, 'duration_secs': 0.648645} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1578.934928] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1578.935151] env[63379]: INFO nova.compute.manager [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Took 9.17 seconds to spawn the instance on the hypervisor. [ 1578.935369] env[63379]: DEBUG nova.compute.manager [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1578.936151] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f864a25-523f-438e-9b28-829e85f64544 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.034410] env[63379]: DEBUG oslo_concurrency.lockutils [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquiring lock "8b07ef47-3615-41a5-acfd-87c1ad43b4b9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1579.034410] env[63379]: DEBUG oslo_concurrency.lockutils [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "8b07ef47-3615-41a5-acfd-87c1ad43b4b9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1579.071553] env[63379]: DEBUG nova.objects.instance [None req-11470ca2-3cd5-451c-90d4-c2fba44c1c3f tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Lazy-loading 'flavor' on Instance uuid aedff32b-b0c2-4a93-a2c6-349d26839cc4 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1579.176357] env[63379]: DEBUG oslo_vmware.api [None req-8879d40e-0ee4-442a-9ae2-d4b73ed6cb59 tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Task: {'id': task-1779477, 'name': ReconfigVM_Task, 'duration_secs': 0.262943} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1579.176653] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8879d40e-0ee4-442a-9ae2-d4b73ed6cb59 tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Reconfigured VM instance to set the machine id {{(pid=63379) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1579.236454] env[63379]: DEBUG nova.compute.manager [req-35ffb01b-c48a-4464-956e-c3b4dc5dbc3c req-8a317c9c-5b92-4d0e-bfbc-9348526fd230 service nova] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Received event network-vif-deleted-9e56e6e1-9271-4b63-8a7f-5dbe0ed9cb1c {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1579.236764] env[63379]: INFO nova.compute.manager [req-35ffb01b-c48a-4464-956e-c3b4dc5dbc3c req-8a317c9c-5b92-4d0e-bfbc-9348526fd230 service nova] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Neutron deleted interface 9e56e6e1-9271-4b63-8a7f-5dbe0ed9cb1c; detaching it from the instance and deleting it from the info cache [ 1579.236912] env[63379]: DEBUG nova.network.neutron [req-35ffb01b-c48a-4464-956e-c3b4dc5dbc3c req-8a317c9c-5b92-4d0e-bfbc-9348526fd230 service nova] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1579.266856] env[63379]: DEBUG oslo_concurrency.lockutils [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.749s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1579.267844] env[63379]: DEBUG nova.compute.manager [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1579.275729] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.571s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1579.281250] env[63379]: INFO nova.compute.claims [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1579.298717] env[63379]: DEBUG oslo_vmware.api [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779476, 'name': ReconfigVM_Task, 'duration_secs': 1.022756} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1579.299046] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Reconfigured VM instance instance-00000031 to attach disk [datastore1] 1d2de9da-9dfe-42d2-b206-bb5139b1970b/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48-rescue.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1579.299970] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ddd118f-e750-49ec-98a3-6d01ea215d00 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.340717] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c960ee09-fe2c-4c00-90eb-7c179d6433bb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.363969] env[63379]: DEBUG oslo_vmware.api [None req-c2acd61a-6cc1-44ac-8f75-e5680994567e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779478, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.366030] env[63379]: DEBUG oslo_vmware.api [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1579.366030] env[63379]: value = "task-1779479" [ 1579.366030] env[63379]: _type = "Task" [ 1579.366030] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1579.373335] env[63379]: DEBUG nova.network.neutron [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1579.381347] env[63379]: DEBUG oslo_vmware.api [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779479, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.459446] env[63379]: DEBUG nova.network.neutron [-] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1579.461900] env[63379]: INFO nova.compute.manager [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Took 41.08 seconds to build instance. 
[ 1579.538931] env[63379]: DEBUG nova.compute.manager [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1579.579022] env[63379]: DEBUG nova.network.neutron [req-18e31967-28af-452e-aadf-c98966f6d253 req-9161f245-f86a-4d3b-911b-44139c01ab1d service nova] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Updated VIF entry in instance network info cache for port d994b910-f078-4d71-a9e5-f3177a54dfef. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1579.579022] env[63379]: DEBUG nova.network.neutron [req-18e31967-28af-452e-aadf-c98966f6d253 req-9161f245-f86a-4d3b-911b-44139c01ab1d service nova] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Updating instance_info_cache with network_info: [{"id": "d994b910-f078-4d71-a9e5-f3177a54dfef", "address": "fa:16:3e:45:dc:63", "network": {"id": "6f6e9d87-2ff1-4f28-8e25-27bb57119d5d", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-455291752-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.171", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7fadf3c8628840efb6c8f6f99df21694", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c68b7663-4f0e-47f0-ac7f-40c6d952f7bb", "external-id": "nsx-vlan-transportzone-696", "segmentation_id": 696, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd994b910-f0", "ovs_interfaceid": "d994b910-f078-4d71-a9e5-f3177a54dfef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1579.582213] env[63379]: DEBUG oslo_concurrency.lockutils [None req-11470ca2-3cd5-451c-90d4-c2fba44c1c3f tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Acquiring lock "refresh_cache-aedff32b-b0c2-4a93-a2c6-349d26839cc4" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1579.742967] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d2d81ffe-4b78-4356-97e0-818680259a40 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.756883] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76a87c81-3c0e-424f-ad91-02cbe37705bc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.805507] env[63379]: DEBUG nova.compute.utils [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Using 
/dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1579.809936] env[63379]: DEBUG nova.compute.manager [req-35ffb01b-c48a-4464-956e-c3b4dc5dbc3c req-8a317c9c-5b92-4d0e-bfbc-9348526fd230 service nova] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Detach interface failed, port_id=9e56e6e1-9271-4b63-8a7f-5dbe0ed9cb1c, reason: Instance a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6 could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 1579.810688] env[63379]: DEBUG nova.compute.manager [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1579.810877] env[63379]: DEBUG nova.network.neutron [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1579.841456] env[63379]: DEBUG oslo_vmware.api [None req-c2acd61a-6cc1-44ac-8f75-e5680994567e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779478, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.842722] env[63379]: DEBUG nova.network.neutron [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Updating instance_info_cache with network_info: [{"id": "a8926575-6550-43c6-b23d-a15787ee76c0", "address": "fa:16:3e:c2:d2:b4", "network": {"id": "b8a1048f-18ff-4dd7-a19e-5d58874f3f5d", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1410192054-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea2c1f9216ee4d8e8349a27de543c2d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaba65c3-6925-4c7f-83b6-17cd1a328e27", "external-id": "nsx-vlan-transportzone-202", "segmentation_id": 202, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8926575-65", "ovs_interfaceid": "a8926575-6550-43c6-b23d-a15787ee76c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1579.880295] env[63379]: DEBUG oslo_vmware.api [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779479, 'name': ReconfigVM_Task, 'duration_secs': 0.306189} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1579.880541] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1579.880808] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-df1f6706-7573-49c9-b6cb-d5ef41b65384 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.890440] env[63379]: DEBUG oslo_vmware.api [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1579.890440] env[63379]: value = "task-1779480" [ 1579.890440] env[63379]: _type = "Task" [ 1579.890440] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1579.902219] env[63379]: DEBUG oslo_vmware.api [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779480, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.904087] env[63379]: DEBUG nova.policy [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4a09fcc05b7d4239bcd13389bb41ebf4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f6552f9956224ba5a0a01328da741242', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1579.962705] env[63379]: INFO nova.compute.manager [-] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Took 2.00 seconds to deallocate network for instance. 
[ 1579.967327] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6e30469f-2f1a-4bf8-919b-8e4459f1f6d7 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "f983d089-7cfc-46a5-8f8d-f49f67aef1da" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.836s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1580.061504] env[63379]: DEBUG oslo_concurrency.lockutils [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1580.081869] env[63379]: DEBUG oslo_concurrency.lockutils [req-18e31967-28af-452e-aadf-c98966f6d253 req-9161f245-f86a-4d3b-911b-44139c01ab1d service nova] Releasing lock "refresh_cache-aedff32b-b0c2-4a93-a2c6-349d26839cc4" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1580.082151] env[63379]: DEBUG nova.compute.manager [req-18e31967-28af-452e-aadf-c98966f6d253 req-9161f245-f86a-4d3b-911b-44139c01ab1d service nova] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Received event network-vif-plugged-a8926575-6550-43c6-b23d-a15787ee76c0 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1580.082359] env[63379]: DEBUG oslo_concurrency.lockutils [req-18e31967-28af-452e-aadf-c98966f6d253 req-9161f245-f86a-4d3b-911b-44139c01ab1d service nova] Acquiring lock "38be0e8d-188b-4a98-aedc-5d941b63c000-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1580.082616] env[63379]: DEBUG oslo_concurrency.lockutils [req-18e31967-28af-452e-aadf-c98966f6d253 req-9161f245-f86a-4d3b-911b-44139c01ab1d service nova] Lock "38be0e8d-188b-4a98-aedc-5d941b63c000-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1580.082796] env[63379]: DEBUG oslo_concurrency.lockutils [req-18e31967-28af-452e-aadf-c98966f6d253 req-9161f245-f86a-4d3b-911b-44139c01ab1d service nova] Lock "38be0e8d-188b-4a98-aedc-5d941b63c000-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1580.082965] env[63379]: DEBUG nova.compute.manager [req-18e31967-28af-452e-aadf-c98966f6d253 req-9161f245-f86a-4d3b-911b-44139c01ab1d service nova] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] No waiting events found dispatching network-vif-plugged-a8926575-6550-43c6-b23d-a15787ee76c0 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1580.083171] env[63379]: WARNING nova.compute.manager [req-18e31967-28af-452e-aadf-c98966f6d253 req-9161f245-f86a-4d3b-911b-44139c01ab1d service nova] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Received unexpected event network-vif-plugged-a8926575-6550-43c6-b23d-a15787ee76c0 for instance with vm_state building and task_state spawning. 
[ 1580.083648] env[63379]: DEBUG oslo_concurrency.lockutils [None req-11470ca2-3cd5-451c-90d4-c2fba44c1c3f tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Acquired lock "refresh_cache-aedff32b-b0c2-4a93-a2c6-349d26839cc4" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1580.314718] env[63379]: DEBUG nova.compute.manager [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1580.343025] env[63379]: DEBUG oslo_vmware.api [None req-c2acd61a-6cc1-44ac-8f75-e5680994567e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779478, 'name': RemoveSnapshot_Task, 'duration_secs': 1.102211} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1580.346264] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c2acd61a-6cc1-44ac-8f75-e5680994567e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Deleted Snapshot of the VM instance {{(pid=63379) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1580.346663] env[63379]: INFO nova.compute.manager [None req-c2acd61a-6cc1-44ac-8f75-e5680994567e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Took 20.23 seconds to snapshot the instance on the hypervisor. 
[ 1580.348899] env[63379]: DEBUG oslo_concurrency.lockutils [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Releasing lock "refresh_cache-38be0e8d-188b-4a98-aedc-5d941b63c000" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1580.349181] env[63379]: DEBUG nova.compute.manager [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Instance network_info: |[{"id": "a8926575-6550-43c6-b23d-a15787ee76c0", "address": "fa:16:3e:c2:d2:b4", "network": {"id": "b8a1048f-18ff-4dd7-a19e-5d58874f3f5d", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1410192054-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea2c1f9216ee4d8e8349a27de543c2d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaba65c3-6925-4c7f-83b6-17cd1a328e27", "external-id": "nsx-vlan-transportzone-202", "segmentation_id": 202, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8926575-65", "ovs_interfaceid": "a8926575-6550-43c6-b23d-a15787ee76c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1580.350225] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c2:d2:b4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eaba65c3-6925-4c7f-83b6-17cd1a328e27', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a8926575-6550-43c6-b23d-a15787ee76c0', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1580.361121] env[63379]: DEBUG oslo.service.loopingcall [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1580.361530] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1580.361642] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8ac98ad9-1c59-4d5f-920a-6097fd806435 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.388977] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1580.388977] env[63379]: value = "task-1779481" [ 1580.388977] env[63379]: _type = "Task" [ 1580.388977] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1580.406028] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779481, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.411068] env[63379]: DEBUG oslo_vmware.api [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779480, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.440088] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7fa858f3-822a-4369-9c42-d26ab769da50 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Acquiring lock "6e022c9a-642b-4d96-8195-e56809bbd7b9" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1580.440340] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7fa858f3-822a-4369-9c42-d26ab769da50 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Lock "6e022c9a-642b-4d96-8195-e56809bbd7b9" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1580.440621] env[63379]: INFO nova.compute.manager [None req-7fa858f3-822a-4369-9c42-d26ab769da50 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Rebooting instance [ 1580.527597] env[63379]: INFO nova.compute.manager [None req-ef7f56e7-6b54-4e9f-b59f-290b32f364eb tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Took 0.56 seconds to detach 1 volumes for instance. 
[ 1580.532437] env[63379]: DEBUG nova.compute.manager [None req-ef7f56e7-6b54-4e9f-b59f-290b32f364eb tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Deleting volume: 084d5362-d8e9-4034-9623-555ed06a1add {{(pid=63379) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3248}} [ 1580.665955] env[63379]: DEBUG nova.compute.manager [req-cab3d0e3-f6d0-433c-9ef0-3d3f8e146cc0 req-44a4536a-5950-4c18-b035-9b39703a8414 service nova] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Received event network-changed-a8926575-6550-43c6-b23d-a15787ee76c0 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1580.666156] env[63379]: DEBUG nova.compute.manager [req-cab3d0e3-f6d0-433c-9ef0-3d3f8e146cc0 req-44a4536a-5950-4c18-b035-9b39703a8414 service nova] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Refreshing instance network info cache due to event network-changed-a8926575-6550-43c6-b23d-a15787ee76c0. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1580.666384] env[63379]: DEBUG oslo_concurrency.lockutils [req-cab3d0e3-f6d0-433c-9ef0-3d3f8e146cc0 req-44a4536a-5950-4c18-b035-9b39703a8414 service nova] Acquiring lock "refresh_cache-38be0e8d-188b-4a98-aedc-5d941b63c000" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1580.666536] env[63379]: DEBUG oslo_concurrency.lockutils [req-cab3d0e3-f6d0-433c-9ef0-3d3f8e146cc0 req-44a4536a-5950-4c18-b035-9b39703a8414 service nova] Acquired lock "refresh_cache-38be0e8d-188b-4a98-aedc-5d941b63c000" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1580.666703] env[63379]: DEBUG nova.network.neutron [req-cab3d0e3-f6d0-433c-9ef0-3d3f8e146cc0 req-44a4536a-5950-4c18-b035-9b39703a8414 service nova] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Refreshing network info cache for port a8926575-6550-43c6-b23d-a15787ee76c0 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1580.677472] env[63379]: DEBUG nova.network.neutron [None req-11470ca2-3cd5-451c-90d4-c2fba44c1c3f tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1580.909557] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779481, 'name': CreateVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.916861] env[63379]: DEBUG oslo_vmware.api [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779480, 'name': PowerOnVM_Task, 'duration_secs': 0.910496} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1580.917741] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1580.920833] env[63379]: DEBUG nova.compute.manager [None req-151402e2-48b1-4634-b2af-58c21d0233b9 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1580.921930] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58159f16-6944-42d2-973b-fef1e4b94c5b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.936053] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43dfb27a-eada-4df2-94e2-6b3356455f86 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.947371] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f81a0444-a7b5-405d-8437-09c5c9d314fc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.982311] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7fa858f3-822a-4369-9c42-d26ab769da50 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Acquiring lock "refresh_cache-6e022c9a-642b-4d96-8195-e56809bbd7b9" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1580.982712] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7fa858f3-822a-4369-9c42-d26ab769da50 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Acquired lock "refresh_cache-6e022c9a-642b-4d96-8195-e56809bbd7b9" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1580.982997] env[63379]: DEBUG nova.network.neutron [None req-7fa858f3-822a-4369-9c42-d26ab769da50 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1580.984879] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-201746e6-c30f-46a4-b9c7-f7c22b596d0b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.996765] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbc1664e-7cdc-43e4-8b0a-cc1828f22548 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.013061] env[63379]: DEBUG nova.compute.provider_tree [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Inventory has 
not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1581.085638] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ef7f56e7-6b54-4e9f-b59f-290b32f364eb tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1581.113179] env[63379]: DEBUG nova.network.neutron [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Successfully created port: 8c5374c2-6a00-48c8-846d-94d7f695d456 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1581.331255] env[63379]: DEBUG nova.compute.manager [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1581.365691] env[63379]: DEBUG nova.virt.hardware [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1581.366162] env[63379]: DEBUG nova.virt.hardware [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1581.366474] env[63379]: DEBUG nova.virt.hardware [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1581.366779] env[63379]: DEBUG nova.virt.hardware [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1581.367070] env[63379]: DEBUG nova.virt.hardware [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Image pref 0:0:0 {{(pid=63379) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1581.367340] env[63379]: DEBUG nova.virt.hardware [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1581.367681] env[63379]: DEBUG nova.virt.hardware [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1581.367975] env[63379]: DEBUG nova.virt.hardware [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1581.368294] env[63379]: DEBUG nova.virt.hardware [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1581.368588] env[63379]: DEBUG nova.virt.hardware [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1581.368884] env[63379]: DEBUG nova.virt.hardware [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1581.370190] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92dd73e5-a830-4e41-b41e-bd0b36f52644 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.379303] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59cb9cb6-3fa4-45f4-b98e-67c0621dc322 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.394479] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a13046a8-d32e-49a7-a044-02ed2cd3a9cd tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Acquiring lock "d47be684-6cd8-45c6-8c6a-9a6db0390f97" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1581.394756] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a13046a8-d32e-49a7-a044-02ed2cd3a9cd tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Lock "d47be684-6cd8-45c6-8c6a-9a6db0390f97" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 
0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1581.394969] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a13046a8-d32e-49a7-a044-02ed2cd3a9cd tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Acquiring lock "d47be684-6cd8-45c6-8c6a-9a6db0390f97-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1581.395172] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a13046a8-d32e-49a7-a044-02ed2cd3a9cd tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Lock "d47be684-6cd8-45c6-8c6a-9a6db0390f97-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1581.395384] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a13046a8-d32e-49a7-a044-02ed2cd3a9cd tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Lock "d47be684-6cd8-45c6-8c6a-9a6db0390f97-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1581.403643] env[63379]: INFO nova.compute.manager [None req-a13046a8-d32e-49a7-a044-02ed2cd3a9cd tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Terminating instance [ 1581.404791] env[63379]: DEBUG nova.compute.manager [None req-a13046a8-d32e-49a7-a044-02ed2cd3a9cd tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1581.404992] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a13046a8-d32e-49a7-a044-02ed2cd3a9cd tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1581.406684] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8e08763-1dcf-454b-a891-ecc979f0d023 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.412672] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779481, 'name': CreateVM_Task, 'duration_secs': 0.581535} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1581.413190] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1581.413887] env[63379]: DEBUG oslo_concurrency.lockutils [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1581.414061] env[63379]: DEBUG oslo_concurrency.lockutils [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1581.414419] env[63379]: DEBUG oslo_concurrency.lockutils [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1581.418026] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-80258a2c-331a-4097-a0e8-59ab0d8227bd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.418507] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-a13046a8-d32e-49a7-a044-02ed2cd3a9cd tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1581.419096] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-094ef603-2451-4a85-b422-c6b41b99a9b2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.422432] env[63379]: DEBUG oslo_vmware.api [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Waiting for the task: (returnval){ [ 1581.422432] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5226a720-9acc-cb63-3601-d564dddd43d4" [ 1581.422432] env[63379]: _type = "Task" [ 1581.422432] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1581.430023] env[63379]: DEBUG oslo_vmware.api [None req-a13046a8-d32e-49a7-a044-02ed2cd3a9cd tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Waiting for the task: (returnval){ [ 1581.430023] env[63379]: value = "task-1779483" [ 1581.430023] env[63379]: _type = "Task" [ 1581.430023] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1581.434836] env[63379]: DEBUG oslo_vmware.api [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5226a720-9acc-cb63-3601-d564dddd43d4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.444037] env[63379]: DEBUG oslo_vmware.api [None req-a13046a8-d32e-49a7-a044-02ed2cd3a9cd tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Task: {'id': task-1779483, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.454814] env[63379]: DEBUG nova.compute.manager [req-ac328aba-2031-43cf-9525-26209b4e81c9 req-d71da642-3c48-4d0d-8818-cda42893c980 service nova] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Received event network-changed-3538ffcb-51cd-414b-ad0e-080a6e1ff138 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1581.454814] env[63379]: DEBUG nova.compute.manager [req-ac328aba-2031-43cf-9525-26209b4e81c9 req-d71da642-3c48-4d0d-8818-cda42893c980 service nova] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Refreshing instance network info cache due to event network-changed-3538ffcb-51cd-414b-ad0e-080a6e1ff138. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1581.454814] env[63379]: DEBUG oslo_concurrency.lockutils [req-ac328aba-2031-43cf-9525-26209b4e81c9 req-d71da642-3c48-4d0d-8818-cda42893c980 service nova] Acquiring lock "refresh_cache-f983d089-7cfc-46a5-8f8d-f49f67aef1da" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1581.454814] env[63379]: DEBUG oslo_concurrency.lockutils [req-ac328aba-2031-43cf-9525-26209b4e81c9 req-d71da642-3c48-4d0d-8818-cda42893c980 service nova] Acquired lock "refresh_cache-f983d089-7cfc-46a5-8f8d-f49f67aef1da" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1581.454814] env[63379]: DEBUG nova.network.neutron [req-ac328aba-2031-43cf-9525-26209b4e81c9 req-d71da642-3c48-4d0d-8818-cda42893c980 service nova] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Refreshing network info cache for port 3538ffcb-51cd-414b-ad0e-080a6e1ff138 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1581.516872] env[63379]: DEBUG nova.scheduler.client.report [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1581.827545] env[63379]: DEBUG nova.network.neutron 
[req-cab3d0e3-f6d0-433c-9ef0-3d3f8e146cc0 req-44a4536a-5950-4c18-b035-9b39703a8414 service nova] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Updated VIF entry in instance network info cache for port a8926575-6550-43c6-b23d-a15787ee76c0. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1581.827545] env[63379]: DEBUG nova.network.neutron [req-cab3d0e3-f6d0-433c-9ef0-3d3f8e146cc0 req-44a4536a-5950-4c18-b035-9b39703a8414 service nova] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Updating instance_info_cache with network_info: [{"id": "a8926575-6550-43c6-b23d-a15787ee76c0", "address": "fa:16:3e:c2:d2:b4", "network": {"id": "b8a1048f-18ff-4dd7-a19e-5d58874f3f5d", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1410192054-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea2c1f9216ee4d8e8349a27de543c2d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaba65c3-6925-4c7f-83b6-17cd1a328e27", "external-id": "nsx-vlan-transportzone-202", "segmentation_id": 202, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8926575-65", "ovs_interfaceid": "a8926575-6550-43c6-b23d-a15787ee76c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1581.884818] env[63379]: DEBUG nova.network.neutron [None req-11470ca2-3cd5-451c-90d4-c2fba44c1c3f tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Updating instance_info_cache with network_info: [{"id": "d994b910-f078-4d71-a9e5-f3177a54dfef", "address": "fa:16:3e:45:dc:63", "network": {"id": "6f6e9d87-2ff1-4f28-8e25-27bb57119d5d", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-455291752-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.171", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7fadf3c8628840efb6c8f6f99df21694", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c68b7663-4f0e-47f0-ac7f-40c6d952f7bb", "external-id": "nsx-vlan-transportzone-696", "segmentation_id": 696, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd994b910-f0", "ovs_interfaceid": "d994b910-f078-4d71-a9e5-f3177a54dfef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1581.938678] 
env[63379]: DEBUG oslo_vmware.api [None req-a13046a8-d32e-49a7-a044-02ed2cd3a9cd tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Task: {'id': task-1779483, 'name': PowerOffVM_Task, 'duration_secs': 0.330922} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1581.942990] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-a13046a8-d32e-49a7-a044-02ed2cd3a9cd tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1581.943400] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a13046a8-d32e-49a7-a044-02ed2cd3a9cd tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1581.943825] env[63379]: DEBUG oslo_vmware.api [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5226a720-9acc-cb63-3601-d564dddd43d4, 'name': SearchDatastore_Task, 'duration_secs': 0.021054} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1581.946668] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ea8a7b8f-0524-4149-a504-4d5890e22e8e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.948951] env[63379]: DEBUG oslo_concurrency.lockutils [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1581.949307] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1581.949665] env[63379]: DEBUG oslo_concurrency.lockutils [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1581.949934] env[63379]: DEBUG oslo_concurrency.lockutils [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1581.950266] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1581.950742] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1a03a031-6053-4829-8ae1-8de9717a7889 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.961986] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1581.962472] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1581.963393] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ce40ea4-e892-469c-ad29-b2420ac14fb5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.972020] env[63379]: DEBUG oslo_vmware.api [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Waiting for the task: (returnval){ [ 1581.972020] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]522f657e-7632-64ef-1c4f-976250ae3cf8" [ 1581.972020] env[63379]: _type = "Task" [ 1581.972020] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1581.981667] env[63379]: DEBUG oslo_vmware.api [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]522f657e-7632-64ef-1c4f-976250ae3cf8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.024767] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.750s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1582.025318] env[63379]: DEBUG nova.compute.manager [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1582.028410] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0fdab32e-a2dc-4a7e-85d5-45265285a3a8 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.068s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1582.028644] env[63379]: DEBUG nova.objects.instance [None req-0fdab32e-a2dc-4a7e-85d5-45265285a3a8 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Lazy-loading 'resources' on Instance uuid 5aa36799-251b-4933-8ccd-8125995b1f8b {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1582.070280] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a13046a8-d32e-49a7-a044-02ed2cd3a9cd tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1582.070280] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a13046a8-d32e-49a7-a044-02ed2cd3a9cd tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1582.070447] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-a13046a8-d32e-49a7-a044-02ed2cd3a9cd tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Deleting the datastore file [datastore1] d47be684-6cd8-45c6-8c6a-9a6db0390f97 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1582.070672] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9ff1b363-7afe-477e-96ad-c5fe0b57752b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.081612] env[63379]: DEBUG oslo_vmware.api [None req-a13046a8-d32e-49a7-a044-02ed2cd3a9cd tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Waiting for the task: (returnval){ [ 1582.081612] env[63379]: value = "task-1779485" [ 1582.081612] env[63379]: _type = "Task" [ 1582.081612] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.093480] env[63379]: DEBUG oslo_vmware.api [None req-a13046a8-d32e-49a7-a044-02ed2cd3a9cd tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Task: {'id': task-1779485, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.162381] env[63379]: DEBUG nova.network.neutron [None req-7fa858f3-822a-4369-9c42-d26ab769da50 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Updating instance_info_cache with network_info: [{"id": "3f2cd71e-08fb-4de9-9736-18ae2bbad0eb", "address": "fa:16:3e:7a:aa:83", "network": {"id": "8f3138b9-b170-40da-aa17-d0938c48221d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2072680575-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "696eed8e898e4ffd831805df17a93d27", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f2cd71e-08", "ovs_interfaceid": "3f2cd71e-08fb-4de9-9736-18ae2bbad0eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1582.333164] env[63379]: DEBUG oslo_concurrency.lockutils [req-cab3d0e3-f6d0-433c-9ef0-3d3f8e146cc0 req-44a4536a-5950-4c18-b035-9b39703a8414 service nova] Releasing lock "refresh_cache-38be0e8d-188b-4a98-aedc-5d941b63c000" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1582.333548] env[63379]: DEBUG nova.compute.manager [req-cab3d0e3-f6d0-433c-9ef0-3d3f8e146cc0 req-44a4536a-5950-4c18-b035-9b39703a8414 service nova] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Received event network-changed-3f2cd71e-08fb-4de9-9736-18ae2bbad0eb {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1582.333652] env[63379]: DEBUG nova.compute.manager [req-cab3d0e3-f6d0-433c-9ef0-3d3f8e146cc0 req-44a4536a-5950-4c18-b035-9b39703a8414 service nova] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Refreshing instance network info cache due to event network-changed-3f2cd71e-08fb-4de9-9736-18ae2bbad0eb. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1582.333893] env[63379]: DEBUG oslo_concurrency.lockutils [req-cab3d0e3-f6d0-433c-9ef0-3d3f8e146cc0 req-44a4536a-5950-4c18-b035-9b39703a8414 service nova] Acquiring lock "refresh_cache-6e022c9a-642b-4d96-8195-e56809bbd7b9" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1582.359690] env[63379]: DEBUG nova.network.neutron [req-ac328aba-2031-43cf-9525-26209b4e81c9 req-d71da642-3c48-4d0d-8818-cda42893c980 service nova] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Updated VIF entry in instance network info cache for port 3538ffcb-51cd-414b-ad0e-080a6e1ff138. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1582.360085] env[63379]: DEBUG nova.network.neutron [req-ac328aba-2031-43cf-9525-26209b4e81c9 req-d71da642-3c48-4d0d-8818-cda42893c980 service nova] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Updating instance_info_cache with network_info: [{"id": "3538ffcb-51cd-414b-ad0e-080a6e1ff138", "address": "fa:16:3e:6a:5e:54", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3538ffcb-51", "ovs_interfaceid": "3538ffcb-51cd-414b-ad0e-080a6e1ff138", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1582.390380] env[63379]: DEBUG oslo_concurrency.lockutils [None req-11470ca2-3cd5-451c-90d4-c2fba44c1c3f tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Releasing lock "refresh_cache-aedff32b-b0c2-4a93-a2c6-349d26839cc4" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1582.390380] env[63379]: DEBUG nova.compute.manager [None req-11470ca2-3cd5-451c-90d4-c2fba44c1c3f tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Inject network info {{(pid=63379) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7265}} [ 1582.390521] env[63379]: DEBUG nova.compute.manager [None req-11470ca2-3cd5-451c-90d4-c2fba44c1c3f tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] network_info to inject: |[{"id": "d994b910-f078-4d71-a9e5-f3177a54dfef", "address": "fa:16:3e:45:dc:63", "network": {"id": 
"6f6e9d87-2ff1-4f28-8e25-27bb57119d5d", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-455291752-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.171", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7fadf3c8628840efb6c8f6f99df21694", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c68b7663-4f0e-47f0-ac7f-40c6d952f7bb", "external-id": "nsx-vlan-transportzone-696", "segmentation_id": 696, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd994b910-f0", "ovs_interfaceid": "d994b910-f078-4d71-a9e5-f3177a54dfef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7266}} [ 1582.395906] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-11470ca2-3cd5-451c-90d4-c2fba44c1c3f tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Reconfiguring VM instance to set the machine id {{(pid=63379) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1582.396517] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f28a496d-bcc3-4a03-8529-544b8ef46d54 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.416176] env[63379]: DEBUG oslo_vmware.api [None req-11470ca2-3cd5-451c-90d4-c2fba44c1c3f tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Waiting for the task: (returnval){ [ 1582.416176] env[63379]: value = "task-1779486" [ 1582.416176] env[63379]: _type = "Task" [ 1582.416176] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.425847] env[63379]: DEBUG oslo_vmware.api [None req-11470ca2-3cd5-451c-90d4-c2fba44c1c3f tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Task: {'id': task-1779486, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.482915] env[63379]: DEBUG oslo_vmware.api [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]522f657e-7632-64ef-1c4f-976250ae3cf8, 'name': SearchDatastore_Task, 'duration_secs': 0.018614} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1582.483914] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94306bb0-bab9-4caa-b6e4-f478713e76d3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.492147] env[63379]: DEBUG oslo_vmware.api [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Waiting for the task: (returnval){ [ 1582.492147] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d00fc4-70a2-e2cb-4ee1-299f8d3db152" [ 1582.492147] env[63379]: _type = "Task" [ 1582.492147] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.501778] env[63379]: DEBUG oslo_vmware.api [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d00fc4-70a2-e2cb-4ee1-299f8d3db152, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.532019] env[63379]: DEBUG nova.compute.utils [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1582.537250] env[63379]: DEBUG nova.compute.manager [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1582.537330] env[63379]: DEBUG nova.network.neutron [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1582.591243] env[63379]: DEBUG nova.policy [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a52cb7db81d24a8faddcb40308665627', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '50144e7fcb0642d7a1d1514f2233f555', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1582.605195] env[63379]: DEBUG oslo_vmware.api [None req-a13046a8-d32e-49a7-a044-02ed2cd3a9cd tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Task: {'id': task-1779485, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.464165} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1582.608020] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-a13046a8-d32e-49a7-a044-02ed2cd3a9cd tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1582.608251] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a13046a8-d32e-49a7-a044-02ed2cd3a9cd tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1582.608440] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a13046a8-d32e-49a7-a044-02ed2cd3a9cd tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1582.608751] env[63379]: INFO nova.compute.manager [None req-a13046a8-d32e-49a7-a044-02ed2cd3a9cd tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1582.609077] env[63379]: DEBUG oslo.service.loopingcall [None req-a13046a8-d32e-49a7-a044-02ed2cd3a9cd tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1582.609478] env[63379]: DEBUG nova.compute.manager [-] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1582.609580] env[63379]: DEBUG nova.network.neutron [-] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1582.668228] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7fa858f3-822a-4369-9c42-d26ab769da50 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Releasing lock "refresh_cache-6e022c9a-642b-4d96-8195-e56809bbd7b9" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1582.670293] env[63379]: DEBUG oslo_concurrency.lockutils [req-cab3d0e3-f6d0-433c-9ef0-3d3f8e146cc0 req-44a4536a-5950-4c18-b035-9b39703a8414 service nova] Acquired lock "refresh_cache-6e022c9a-642b-4d96-8195-e56809bbd7b9" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1582.670451] env[63379]: DEBUG nova.network.neutron [req-cab3d0e3-f6d0-433c-9ef0-3d3f8e146cc0 req-44a4536a-5950-4c18-b035-9b39703a8414 service nova] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Refreshing network info cache for port 3f2cd71e-08fb-4de9-9736-18ae2bbad0eb {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1582.671958] env[63379]: DEBUG nova.compute.manager [None req-7fa858f3-822a-4369-9c42-d26ab769da50 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1582.673477] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0de14806-025b-4492-a453-842cffa60c84 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.863606] env[63379]: DEBUG oslo_concurrency.lockutils [req-ac328aba-2031-43cf-9525-26209b4e81c9 req-d71da642-3c48-4d0d-8818-cda42893c980 service nova] Releasing lock "refresh_cache-f983d089-7cfc-46a5-8f8d-f49f67aef1da" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1582.930282] env[63379]: DEBUG oslo_vmware.api [None req-11470ca2-3cd5-451c-90d4-c2fba44c1c3f tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Task: {'id': task-1779486, 'name': ReconfigVM_Task, 'duration_secs': 0.17805} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1582.930493] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-11470ca2-3cd5-451c-90d4-c2fba44c1c3f tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Reconfigured VM instance to set the machine id {{(pid=63379) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1583.004936] env[63379]: DEBUG oslo_vmware.api [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d00fc4-70a2-e2cb-4ee1-299f8d3db152, 'name': SearchDatastore_Task, 'duration_secs': 0.01215} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1583.008274] env[63379]: DEBUG oslo_concurrency.lockutils [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1583.008557] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 38be0e8d-188b-4a98-aedc-5d941b63c000/38be0e8d-188b-4a98-aedc-5d941b63c000.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1583.009400] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5dd495c9-9c32-4307-a830-3c2b560810b7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.018890] env[63379]: DEBUG oslo_vmware.api [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Waiting for the task: (returnval){ [ 1583.018890] env[63379]: value = "task-1779487" [ 1583.018890] env[63379]: _type = "Task" [ 1583.018890] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1583.036282] env[63379]: DEBUG oslo_vmware.api [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': task-1779487, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.037809] env[63379]: DEBUG nova.compute.manager [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Start building block device mappings for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1583.130734] env[63379]: DEBUG oslo_concurrency.lockutils [None req-48101c4a-957c-468f-97ce-d097b45d0130 tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Acquiring lock "aedff32b-b0c2-4a93-a2c6-349d26839cc4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1583.131329] env[63379]: DEBUG oslo_concurrency.lockutils [None req-48101c4a-957c-468f-97ce-d097b45d0130 tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Lock "aedff32b-b0c2-4a93-a2c6-349d26839cc4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1583.131595] env[63379]: DEBUG oslo_concurrency.lockutils [None req-48101c4a-957c-468f-97ce-d097b45d0130 tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Acquiring lock "aedff32b-b0c2-4a93-a2c6-349d26839cc4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1583.131810] env[63379]: DEBUG oslo_concurrency.lockutils [None req-48101c4a-957c-468f-97ce-d097b45d0130 tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Lock "aedff32b-b0c2-4a93-a2c6-349d26839cc4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1583.132013] env[63379]: DEBUG oslo_concurrency.lockutils [None req-48101c4a-957c-468f-97ce-d097b45d0130 tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Lock "aedff32b-b0c2-4a93-a2c6-349d26839cc4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1583.135619] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bc26adf-0d71-4768-b70a-57cf2a9dce4b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.139121] env[63379]: INFO nova.compute.manager [None req-48101c4a-957c-468f-97ce-d097b45d0130 tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Terminating instance [ 1583.141245] env[63379]: DEBUG nova.compute.manager [None req-48101c4a-957c-468f-97ce-d097b45d0130 tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1583.141748] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-48101c4a-957c-468f-97ce-d097b45d0130 tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1583.142283] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b003dd81-624e-495c-b279-c1ee63ca187b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.153023] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-902ffb1a-107f-41b7-923a-a55835336f7f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.156573] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-48101c4a-957c-468f-97ce-d097b45d0130 tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1583.157434] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2e3876dc-6e09-48ac-9b59-868d7726c062 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.196899] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0aa1644-9846-416c-98eb-09dce7b09ff5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.205122] env[63379]: DEBUG oslo_vmware.api [None req-48101c4a-957c-468f-97ce-d097b45d0130 tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Waiting for the task: (returnval){ [ 1583.205122] env[63379]: value = "task-1779488" [ 1583.205122] env[63379]: _type = "Task" [ 1583.205122] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1583.213334] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fe7d4e0-c876-45bc-8a68-d9739821bdfc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.222629] env[63379]: DEBUG oslo_vmware.api [None req-48101c4a-957c-468f-97ce-d097b45d0130 tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Task: {'id': task-1779488, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.235135] env[63379]: DEBUG nova.compute.provider_tree [None req-0fdab32e-a2dc-4a7e-85d5-45265285a3a8 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1583.275178] env[63379]: INFO nova.compute.manager [None req-d3a459bd-7378-40a3-af6e-1dcb6f18ce40 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Unrescuing [ 1583.275178] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d3a459bd-7378-40a3-af6e-1dcb6f18ce40 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquiring lock "refresh_cache-1d2de9da-9dfe-42d2-b206-bb5139b1970b" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1583.275178] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d3a459bd-7378-40a3-af6e-1dcb6f18ce40 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquired lock "refresh_cache-1d2de9da-9dfe-42d2-b206-bb5139b1970b" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1583.275306] env[63379]: DEBUG nova.network.neutron [None req-d3a459bd-7378-40a3-af6e-1dcb6f18ce40 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1583.288919] env[63379]: DEBUG nova.compute.manager [req-8eac57f4-9689-4e09-8104-11e6337e7441 req-c2e1ea88-8fad-42e4-b0d0-a52733fdf79b service nova] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Received event network-changed-d994b910-f078-4d71-a9e5-f3177a54dfef {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1583.289237] env[63379]: DEBUG nova.compute.manager [req-8eac57f4-9689-4e09-8104-11e6337e7441 req-c2e1ea88-8fad-42e4-b0d0-a52733fdf79b service nova] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Refreshing instance network info cache due to event network-changed-d994b910-f078-4d71-a9e5-f3177a54dfef. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1583.289479] env[63379]: DEBUG oslo_concurrency.lockutils [req-8eac57f4-9689-4e09-8104-11e6337e7441 req-c2e1ea88-8fad-42e4-b0d0-a52733fdf79b service nova] Acquiring lock "refresh_cache-aedff32b-b0c2-4a93-a2c6-349d26839cc4" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1583.289639] env[63379]: DEBUG oslo_concurrency.lockutils [req-8eac57f4-9689-4e09-8104-11e6337e7441 req-c2e1ea88-8fad-42e4-b0d0-a52733fdf79b service nova] Acquired lock "refresh_cache-aedff32b-b0c2-4a93-a2c6-349d26839cc4" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1583.289912] env[63379]: DEBUG nova.network.neutron [req-8eac57f4-9689-4e09-8104-11e6337e7441 req-c2e1ea88-8fad-42e4-b0d0-a52733fdf79b service nova] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Refreshing network info cache for port d994b910-f078-4d71-a9e5-f3177a54dfef {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1583.496396] env[63379]: DEBUG nova.network.neutron [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Successfully created port: 6a39414d-cc4f-4a85-997b-d633aec0bcef {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1583.533042] env[63379]: DEBUG oslo_vmware.api [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': task-1779487, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.659759] env[63379]: DEBUG nova.network.neutron [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Successfully updated port: 8c5374c2-6a00-48c8-846d-94d7f695d456 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1583.707462] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d1dab18-3aa5-4616-b910-d92c3398ccfb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.722458] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-7fa858f3-822a-4369-9c42-d26ab769da50 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Doing hard reboot of VM {{(pid=63379) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1583.729192] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-d90f9450-c912-4aad-9d51-af3b1fca25ba {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.733695] env[63379]: DEBUG oslo_vmware.api [None req-48101c4a-957c-468f-97ce-d097b45d0130 tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Task: {'id': task-1779488, 'name': PowerOffVM_Task, 'duration_secs': 0.41433} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1583.733695] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-48101c4a-957c-468f-97ce-d097b45d0130 tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1583.733695] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-48101c4a-957c-468f-97ce-d097b45d0130 tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1583.733695] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-823d3bf3-2e66-405a-87db-3ee252257e10 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.740346] env[63379]: DEBUG nova.scheduler.client.report [None req-0fdab32e-a2dc-4a7e-85d5-45265285a3a8 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1583.750435] env[63379]: DEBUG oslo_vmware.api [None req-7fa858f3-822a-4369-9c42-d26ab769da50 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Waiting for the task: (returnval){ [ 1583.750435] env[63379]: value = "task-1779489" [ 1583.750435] env[63379]: _type = "Task" [ 1583.750435] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1583.771548] env[63379]: DEBUG oslo_vmware.api [None req-7fa858f3-822a-4369-9c42-d26ab769da50 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': task-1779489, 'name': ResetVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.846726] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-48101c4a-957c-468f-97ce-d097b45d0130 tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1583.846932] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-48101c4a-957c-468f-97ce-d097b45d0130 tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1583.847212] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-48101c4a-957c-468f-97ce-d097b45d0130 tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Deleting the datastore file [datastore1] aedff32b-b0c2-4a93-a2c6-349d26839cc4 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1583.850201] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-29aa6f8f-78e2-4799-b1f7-7ed596782f29 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.860342] env[63379]: DEBUG oslo_vmware.api [None req-48101c4a-957c-468f-97ce-d097b45d0130 tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Waiting for the task: (returnval){ [ 1583.860342] env[63379]: value = "task-1779491" [ 1583.860342] env[63379]: _type = "Task" [ 1583.860342] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1583.864571] env[63379]: DEBUG nova.network.neutron [req-cab3d0e3-f6d0-433c-9ef0-3d3f8e146cc0 req-44a4536a-5950-4c18-b035-9b39703a8414 service nova] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Updated VIF entry in instance network info cache for port 3f2cd71e-08fb-4de9-9736-18ae2bbad0eb. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1583.864936] env[63379]: DEBUG nova.network.neutron [req-cab3d0e3-f6d0-433c-9ef0-3d3f8e146cc0 req-44a4536a-5950-4c18-b035-9b39703a8414 service nova] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Updating instance_info_cache with network_info: [{"id": "3f2cd71e-08fb-4de9-9736-18ae2bbad0eb", "address": "fa:16:3e:7a:aa:83", "network": {"id": "8f3138b9-b170-40da-aa17-d0938c48221d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2072680575-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "696eed8e898e4ffd831805df17a93d27", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f2cd71e-08", "ovs_interfaceid": "3f2cd71e-08fb-4de9-9736-18ae2bbad0eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1583.878037] env[63379]: DEBUG oslo_vmware.api [None req-48101c4a-957c-468f-97ce-d097b45d0130 tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Task: {'id': task-1779491, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.032598] env[63379]: DEBUG oslo_vmware.api [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': task-1779487, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.776874} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1584.032871] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 38be0e8d-188b-4a98-aedc-5d941b63c000/38be0e8d-188b-4a98-aedc-5d941b63c000.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1584.033693] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1584.034223] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e72005ed-24a2-4970-948b-561ab1abe891 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.042770] env[63379]: DEBUG oslo_vmware.api [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Waiting for the task: (returnval){ [ 1584.042770] env[63379]: value = "task-1779492" [ 1584.042770] env[63379]: _type = "Task" [ 1584.042770] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1584.052919] env[63379]: DEBUG nova.compute.manager [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1584.060561] env[63379]: DEBUG oslo_vmware.api [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': task-1779492, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.083520] env[63379]: DEBUG nova.virt.hardware [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1584.083809] env[63379]: DEBUG nova.virt.hardware [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1584.084410] env[63379]: DEBUG nova.virt.hardware [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1584.084410] env[63379]: DEBUG nova.virt.hardware [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1584.084410] env[63379]: DEBUG nova.virt.hardware [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1584.084583] env[63379]: DEBUG nova.virt.hardware [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1584.084675] env[63379]: DEBUG nova.virt.hardware [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1584.084831] env[63379]: DEBUG nova.virt.hardware [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Build 
topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1584.085012] env[63379]: DEBUG nova.virt.hardware [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1584.085186] env[63379]: DEBUG nova.virt.hardware [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1584.085395] env[63379]: DEBUG nova.virt.hardware [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1584.086259] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af43f3aa-5722-40cb-86b9-0d147ba2692b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.096359] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71c713cd-64ed-4848-afc5-e25d6d68cc52 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.164213] env[63379]: DEBUG oslo_concurrency.lockutils [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquiring lock "refresh_cache-ec1f7a44-7344-43fb-9d51-688731d8ce14" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1584.164213] env[63379]: DEBUG oslo_concurrency.lockutils [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquired lock "refresh_cache-ec1f7a44-7344-43fb-9d51-688731d8ce14" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1584.164213] env[63379]: DEBUG nova.network.neutron [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1584.180666] env[63379]: DEBUG nova.compute.manager [req-77916d4b-8742-4cf7-8e6b-d6178b78c7d3 req-22275c1b-78f7-4ede-a47b-793d5dbb222d service nova] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Received event network-vif-plugged-8c5374c2-6a00-48c8-846d-94d7f695d456 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1584.180903] env[63379]: DEBUG oslo_concurrency.lockutils [req-77916d4b-8742-4cf7-8e6b-d6178b78c7d3 req-22275c1b-78f7-4ede-a47b-793d5dbb222d service nova] Acquiring lock "ec1f7a44-7344-43fb-9d51-688731d8ce14-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1584.181133] env[63379]: DEBUG oslo_concurrency.lockutils [req-77916d4b-8742-4cf7-8e6b-d6178b78c7d3 req-22275c1b-78f7-4ede-a47b-793d5dbb222d service nova] Lock "ec1f7a44-7344-43fb-9d51-688731d8ce14-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1584.181312] env[63379]: DEBUG oslo_concurrency.lockutils [req-77916d4b-8742-4cf7-8e6b-d6178b78c7d3 req-22275c1b-78f7-4ede-a47b-793d5dbb222d service nova] Lock "ec1f7a44-7344-43fb-9d51-688731d8ce14-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1584.181526] env[63379]: DEBUG nova.compute.manager [req-77916d4b-8742-4cf7-8e6b-d6178b78c7d3 req-22275c1b-78f7-4ede-a47b-793d5dbb222d service nova] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] No waiting events found dispatching network-vif-plugged-8c5374c2-6a00-48c8-846d-94d7f695d456 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1584.181650] env[63379]: WARNING nova.compute.manager [req-77916d4b-8742-4cf7-8e6b-d6178b78c7d3 req-22275c1b-78f7-4ede-a47b-793d5dbb222d service nova] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Received unexpected event network-vif-plugged-8c5374c2-6a00-48c8-846d-94d7f695d456 for instance with vm_state building and task_state spawning. [ 1584.252516] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0fdab32e-a2dc-4a7e-85d5-45265285a3a8 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.224s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1584.264122] env[63379]: DEBUG oslo_concurrency.lockutils [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.979s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1584.269234] env[63379]: INFO nova.compute.claims [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1584.289115] env[63379]: DEBUG oslo_vmware.api [None req-7fa858f3-822a-4369-9c42-d26ab769da50 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': task-1779489, 'name': ResetVM_Task, 'duration_secs': 0.113634} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1584.290853] env[63379]: INFO nova.scheduler.client.report [None req-0fdab32e-a2dc-4a7e-85d5-45265285a3a8 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Deleted allocations for instance 5aa36799-251b-4933-8ccd-8125995b1f8b [ 1584.292753] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-7fa858f3-822a-4369-9c42-d26ab769da50 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Did hard reboot of VM {{(pid=63379) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1584.292753] env[63379]: DEBUG nova.compute.manager [None req-7fa858f3-822a-4369-9c42-d26ab769da50 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1584.300674] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7123a0ef-7237-4485-9d3d-89500f20cd1d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.373673] env[63379]: DEBUG oslo_concurrency.lockutils [req-cab3d0e3-f6d0-433c-9ef0-3d3f8e146cc0 req-44a4536a-5950-4c18-b035-9b39703a8414 service nova] Releasing lock "refresh_cache-6e022c9a-642b-4d96-8195-e56809bbd7b9" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1584.374247] env[63379]: DEBUG oslo_vmware.api [None req-48101c4a-957c-468f-97ce-d097b45d0130 tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Task: {'id': task-1779491, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.252317} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1584.374478] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-48101c4a-957c-468f-97ce-d097b45d0130 tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1584.374664] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-48101c4a-957c-468f-97ce-d097b45d0130 tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1584.374837] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-48101c4a-957c-468f-97ce-d097b45d0130 tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1584.375382] env[63379]: INFO nova.compute.manager [None req-48101c4a-957c-468f-97ce-d097b45d0130 tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Took 1.23 seconds to destroy the instance on the hypervisor. 
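Editor's note: the records above for instance aedff32b-b0c2-4a93-a2c6-349d26839cc4 trace the usual vmwareapi destroy path: PowerOffVM_Task (task-1779488), a synchronous UnregisterVM, then FileManager.DeleteDatastoreFile_Task (task-1779491) on "[datastore1] aedff32b-b0c2-4a93-a2c6-349d26839cc4", each vCenter task being polled via oslo.vmware's wait_for_task (the api.py:397/434/444 lines in the log). A minimal, illustrative sketch of that call pattern against oslo.vmware directly follows; the session parameters and managed-object reference values are assumptions for the example, and Nova's driver issues these calls through its vm_util/ds_util helpers rather than inline like this.

    # Illustrative sketch only: power-off -> unregister -> delete-datastore-dir,
    # mirroring the task sequence in the log above. Host, credentials and moref
    # values are hypothetical placeholders.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession(
        'vcenter.example.org',             # hypothetical vCenter host
        'administrator@vsphere.local',     # hypothetical user
        'secret',                          # hypothetical password
        api_retry_count=10,
        task_poll_interval=0.5)

    # Managed-object references; the real driver looks these up via the
    # PropertyCollector calls visible in the log.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')     # hypothetical
    dc_ref = vim_util.get_moref('datacenter-2', 'Datacenter')     # hypothetical

    # PowerOffVM_Task, then block until vCenter reports completion
    # (this is what produces the "progress is 0%" polling records).
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

    # UnregisterVM returns no task object, matching the single
    # VirtualMachine.UnregisterVM invocation in the log.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # Deleting the instance directory maps to DeleteDatastoreFile_Task.
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager,
                              name='[datastore1] aedff32b-b0c2-4a93-a2c6-349d26839cc4',
                              datacenter=dc_ref)
    session.wait_for_task(task)
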
[ 1584.375765] env[63379]: DEBUG oslo.service.loopingcall [None req-48101c4a-957c-468f-97ce-d097b45d0130 tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1584.376023] env[63379]: DEBUG nova.compute.manager [-] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1584.376130] env[63379]: DEBUG nova.network.neutron [-] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1584.405698] env[63379]: DEBUG nova.network.neutron [None req-d3a459bd-7378-40a3-af6e-1dcb6f18ce40 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Updating instance_info_cache with network_info: [{"id": "8e6b3d77-6a88-493c-9ef0-bae55a6dbbc3", "address": "fa:16:3e:01:39:f4", "network": {"id": "3a5c4f8e-5c7c-4623-90f8-f1b83e5b35f8", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-709139332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce15a519ec5744feb0731439b2534fc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e6b3d77-6a", "ovs_interfaceid": "8e6b3d77-6a88-493c-9ef0-bae55a6dbbc3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1584.421915] env[63379]: DEBUG nova.network.neutron [req-8eac57f4-9689-4e09-8104-11e6337e7441 req-c2e1ea88-8fad-42e4-b0d0-a52733fdf79b service nova] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Updated VIF entry in instance network info cache for port d994b910-f078-4d71-a9e5-f3177a54dfef. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1584.421915] env[63379]: DEBUG nova.network.neutron [req-8eac57f4-9689-4e09-8104-11e6337e7441 req-c2e1ea88-8fad-42e4-b0d0-a52733fdf79b service nova] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Updating instance_info_cache with network_info: [{"id": "d994b910-f078-4d71-a9e5-f3177a54dfef", "address": "fa:16:3e:45:dc:63", "network": {"id": "6f6e9d87-2ff1-4f28-8e25-27bb57119d5d", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-455291752-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.171", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7fadf3c8628840efb6c8f6f99df21694", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c68b7663-4f0e-47f0-ac7f-40c6d952f7bb", "external-id": "nsx-vlan-transportzone-696", "segmentation_id": 696, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd994b910-f0", "ovs_interfaceid": "d994b910-f078-4d71-a9e5-f3177a54dfef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1584.562713] env[63379]: DEBUG oslo_vmware.api [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': task-1779492, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070691} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1584.562713] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1584.564898] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c42244de-c639-4434-9a8f-28585d07bab6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.603848] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Reconfiguring VM instance instance-00000037 to attach disk [datastore1] 38be0e8d-188b-4a98-aedc-5d941b63c000/38be0e8d-188b-4a98-aedc-5d941b63c000.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1584.605056] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f970fb07-432a-4a50-a537-93f1e60ec545 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.628149] env[63379]: DEBUG oslo_vmware.api [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Waiting for the task: (returnval){ [ 1584.628149] env[63379]: value = "task-1779493" [ 1584.628149] env[63379]: _type = "Task" [ 1584.628149] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1584.638379] env[63379]: DEBUG oslo_vmware.api [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': task-1779493, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.725795] env[63379]: DEBUG nova.network.neutron [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Instance cache missing network info. 
{{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1584.743238] env[63379]: DEBUG nova.network.neutron [-] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1584.815122] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0fdab32e-a2dc-4a7e-85d5-45265285a3a8 tempest-ServerPasswordTestJSON-1230001386 tempest-ServerPasswordTestJSON-1230001386-project-member] Lock "5aa36799-251b-4933-8ccd-8125995b1f8b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.062s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1584.822831] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7fa858f3-822a-4369-9c42-d26ab769da50 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Lock "6e022c9a-642b-4d96-8195-e56809bbd7b9" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.382s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1584.909034] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d3a459bd-7378-40a3-af6e-1dcb6f18ce40 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Releasing lock "refresh_cache-1d2de9da-9dfe-42d2-b206-bb5139b1970b" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1584.909256] env[63379]: DEBUG nova.objects.instance [None req-d3a459bd-7378-40a3-af6e-1dcb6f18ce40 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Lazy-loading 'flavor' on Instance uuid 1d2de9da-9dfe-42d2-b206-bb5139b1970b {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1584.927515] env[63379]: DEBUG oslo_concurrency.lockutils [req-8eac57f4-9689-4e09-8104-11e6337e7441 req-c2e1ea88-8fad-42e4-b0d0-a52733fdf79b service nova] Releasing lock "refresh_cache-aedff32b-b0c2-4a93-a2c6-349d26839cc4" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1585.143483] env[63379]: DEBUG oslo_vmware.api [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': task-1779493, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.159998] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a5b924b1-b37b-41be-a210-27cdf0187536 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Acquiring lock "15d19ce3-ea71-47ff-a738-9ba00b8dfcf1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1585.160362] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a5b924b1-b37b-41be-a210-27cdf0187536 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Lock "15d19ce3-ea71-47ff-a738-9ba00b8dfcf1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1585.160606] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a5b924b1-b37b-41be-a210-27cdf0187536 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Acquiring lock "15d19ce3-ea71-47ff-a738-9ba00b8dfcf1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1585.160858] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a5b924b1-b37b-41be-a210-27cdf0187536 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Lock "15d19ce3-ea71-47ff-a738-9ba00b8dfcf1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1585.161042] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a5b924b1-b37b-41be-a210-27cdf0187536 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Lock "15d19ce3-ea71-47ff-a738-9ba00b8dfcf1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1585.163084] env[63379]: INFO nova.compute.manager [None req-a5b924b1-b37b-41be-a210-27cdf0187536 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Terminating instance [ 1585.165135] env[63379]: DEBUG nova.compute.manager [None req-a5b924b1-b37b-41be-a210-27cdf0187536 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1585.165354] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5b924b1-b37b-41be-a210-27cdf0187536 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1585.165621] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ce1933eb-7f3a-42c5-9a68-ea89d7fa083f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.173778] env[63379]: DEBUG oslo_vmware.api [None req-a5b924b1-b37b-41be-a210-27cdf0187536 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Waiting for the task: (returnval){ [ 1585.173778] env[63379]: value = "task-1779494" [ 1585.173778] env[63379]: _type = "Task" [ 1585.173778] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1585.184398] env[63379]: DEBUG oslo_vmware.api [None req-a5b924b1-b37b-41be-a210-27cdf0187536 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Task: {'id': task-1779494, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.246013] env[63379]: INFO nova.compute.manager [-] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Took 2.64 seconds to deallocate network for instance. [ 1585.274540] env[63379]: DEBUG nova.network.neutron [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Updating instance_info_cache with network_info: [{"id": "8c5374c2-6a00-48c8-846d-94d7f695d456", "address": "fa:16:3e:ab:fc:8d", "network": {"id": "37610d35-b1d3-4657-9542-9e3e955af5be", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-58653119-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6552f9956224ba5a0a01328da741242", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c5374c2-6a", "ovs_interfaceid": "8c5374c2-6a00-48c8-846d-94d7f695d456", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1585.419041] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93a959d5-3bcf-4b4d-9c74-2036765b105a {{(pid=63379) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.444059] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3a459bd-7378-40a3-af6e-1dcb6f18ce40 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1585.447052] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-82c83318-9e55-4277-a7fd-4a62a7d34eae {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.456861] env[63379]: DEBUG oslo_vmware.api [None req-d3a459bd-7378-40a3-af6e-1dcb6f18ce40 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1585.456861] env[63379]: value = "task-1779495" [ 1585.456861] env[63379]: _type = "Task" [ 1585.456861] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1585.470096] env[63379]: DEBUG oslo_vmware.api [None req-d3a459bd-7378-40a3-af6e-1dcb6f18ce40 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779495, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.521200] env[63379]: DEBUG nova.compute.manager [req-545f245a-fcc3-434e-a33d-eb314f9f2e7c req-24d74cdb-aa68-4950-8160-7e79334ede46 service nova] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Received event network-vif-deleted-775d733a-ad5b-4b39-aa69-8b4a577c4348 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1585.521468] env[63379]: DEBUG nova.compute.manager [req-545f245a-fcc3-434e-a33d-eb314f9f2e7c req-24d74cdb-aa68-4950-8160-7e79334ede46 service nova] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Received event network-vif-deleted-d994b910-f078-4d71-a9e5-f3177a54dfef {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1585.521732] env[63379]: INFO nova.compute.manager [req-545f245a-fcc3-434e-a33d-eb314f9f2e7c req-24d74cdb-aa68-4950-8160-7e79334ede46 service nova] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Neutron deleted interface d994b910-f078-4d71-a9e5-f3177a54dfef; detaching it from the instance and deleting it from the info cache [ 1585.522035] env[63379]: DEBUG nova.network.neutron [req-545f245a-fcc3-434e-a33d-eb314f9f2e7c req-24d74cdb-aa68-4950-8160-7e79334ede46 service nova] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1585.569905] env[63379]: DEBUG nova.network.neutron [-] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1585.649240] env[63379]: DEBUG oslo_vmware.api [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': task-1779493, 'name': ReconfigVM_Task, 'duration_secs': 0.755085} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1585.652526] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Reconfigured VM instance instance-00000037 to attach disk [datastore1] 38be0e8d-188b-4a98-aedc-5d941b63c000/38be0e8d-188b-4a98-aedc-5d941b63c000.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1585.652526] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Creating Virtual Disk of size 1048576 KB and adapter type paraVirtual on the data store {{(pid=63379) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1383}} [ 1585.654865] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CreateVirtualDisk_Task with opID=oslo.vmware-394239c6-ecec-40b6-8d41-5405b5066c36 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.667565] env[63379]: DEBUG oslo_vmware.api [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Waiting for the task: (returnval){ [ 1585.667565] env[63379]: value = "task-1779496" [ 1585.667565] env[63379]: _type = "Task" [ 1585.667565] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1585.684795] env[63379]: DEBUG oslo_vmware.api [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': task-1779496, 'name': CreateVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.690570] env[63379]: DEBUG oslo_vmware.api [None req-a5b924b1-b37b-41be-a210-27cdf0187536 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Task: {'id': task-1779494, 'name': PowerOffVM_Task, 'duration_secs': 0.270187} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1585.691959] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5b924b1-b37b-41be-a210-27cdf0187536 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1585.691959] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5b924b1-b37b-41be-a210-27cdf0187536 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Volume detach. 
Driver type: vmdk {{(pid=63379) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1585.691959] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5b924b1-b37b-41be-a210-27cdf0187536 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369228', 'volume_id': '263e34b9-3753-4240-8bd6-67c4019d79ae', 'name': 'volume-263e34b9-3753-4240-8bd6-67c4019d79ae', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '15d19ce3-ea71-47ff-a738-9ba00b8dfcf1', 'attached_at': '', 'detached_at': '', 'volume_id': '263e34b9-3753-4240-8bd6-67c4019d79ae', 'serial': '263e34b9-3753-4240-8bd6-67c4019d79ae'} {{(pid=63379) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1585.696026] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afdeae7f-29df-4b3a-8874-489bbaf0f699 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.717900] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3828619a-41f2-433f-a35b-da66e41536dd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.731116] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9dcb622-66bb-46c2-abd1-99cad748c198 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.757192] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a13046a8-d32e-49a7-a044-02ed2cd3a9cd tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1585.761095] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02b85043-ccb2-429e-b0b1-9766b1bc6d02 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.779871] env[63379]: DEBUG oslo_concurrency.lockutils [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Releasing lock "refresh_cache-ec1f7a44-7344-43fb-9d51-688731d8ce14" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1585.781282] env[63379]: DEBUG nova.compute.manager [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Instance network_info: |[{"id": "8c5374c2-6a00-48c8-846d-94d7f695d456", "address": "fa:16:3e:ab:fc:8d", "network": {"id": "37610d35-b1d3-4657-9542-9e3e955af5be", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-58653119-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6552f9956224ba5a0a01328da741242", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c5374c2-6a", "ovs_interfaceid": "8c5374c2-6a00-48c8-846d-94d7f695d456", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1585.781282] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5b924b1-b37b-41be-a210-27cdf0187536 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] The volume has not been displaced from its original location: [datastore1] volume-263e34b9-3753-4240-8bd6-67c4019d79ae/volume-263e34b9-3753-4240-8bd6-67c4019d79ae.vmdk. No consolidation needed. {{(pid=63379) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1585.788787] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5b924b1-b37b-41be-a210-27cdf0187536 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Reconfiguring VM instance instance-00000010 to detach disk 2000 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1585.790279] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ab:fc:8d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e445fb59-822c-4d7d-943b-c8e3bbaca62e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8c5374c2-6a00-48c8-846d-94d7f695d456', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1585.797974] env[63379]: DEBUG oslo.service.loopingcall [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1585.799370] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8393c77a-77bb-452f-a8af-b0c2cee941d8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.813816] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1585.816056] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1eecf672-e59b-4235-8cd5-412a4cc2258c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.834702] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8e1e3e91-0c59-4449-bf04-64df88bd3901 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "90f0c97d-695b-4975-8ab9-4e77a9175da1" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1585.835095] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8e1e3e91-0c59-4449-bf04-64df88bd3901 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "90f0c97d-695b-4975-8ab9-4e77a9175da1" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1585.835208] env[63379]: DEBUG nova.compute.manager [None req-8e1e3e91-0c59-4449-bf04-64df88bd3901 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1585.837206] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86ddba65-df2a-45ae-a80c-074986d8481d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.842156] env[63379]: DEBUG oslo_vmware.api [None req-a5b924b1-b37b-41be-a210-27cdf0187536 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Waiting for the task: (returnval){ [ 1585.842156] env[63379]: value = "task-1779497" [ 1585.842156] env[63379]: _type = "Task" [ 1585.842156] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1585.848524] env[63379]: DEBUG nova.compute.manager [None req-8e1e3e91-0c59-4449-bf04-64df88bd3901 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63379) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1585.849203] env[63379]: DEBUG nova.objects.instance [None req-8e1e3e91-0c59-4449-bf04-64df88bd3901 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lazy-loading 'flavor' on Instance uuid 90f0c97d-695b-4975-8ab9-4e77a9175da1 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1585.850385] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1585.850385] env[63379]: value = "task-1779498" [ 1585.850385] env[63379]: _type = "Task" [ 1585.850385] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1585.863755] env[63379]: DEBUG oslo_vmware.api [None req-a5b924b1-b37b-41be-a210-27cdf0187536 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Task: {'id': task-1779497, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.871832] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779498, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.941281] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bbf706d-c340-4f8e-8e74-5bb3c45cbc86 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.950374] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d67180f-9826-4c6b-9d02-2a138df89cfe {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.991886] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6d7348b-9dff-4fe4-adaa-2656987ab7d0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.999987] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef674760-b906-4905-8664-058e833c24b4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.004802] env[63379]: DEBUG oslo_vmware.api [None req-d3a459bd-7378-40a3-af6e-1dcb6f18ce40 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779495, 'name': PowerOffVM_Task, 'duration_secs': 0.221703} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.008401] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3a459bd-7378-40a3-af6e-1dcb6f18ce40 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1586.010455] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3a459bd-7378-40a3-af6e-1dcb6f18ce40 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Reconfiguring VM instance instance-00000031 to detach disk 2001 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1586.011277] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-07b19efa-8d37-421b-b155-159e7c8e56d7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.033515] env[63379]: DEBUG nova.compute.provider_tree [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1586.035419] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-05f9b99d-4c22-4b77-b160-e9b80093eac6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.046623] env[63379]: DEBUG oslo_vmware.api [None req-d3a459bd-7378-40a3-af6e-1dcb6f18ce40 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1586.046623] env[63379]: value = "task-1779499" [ 1586.046623] env[63379]: _type = "Task" [ 1586.046623] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.048239] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1b5c943-aaec-478c-8aa2-6c17f7b80041 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.072284] env[63379]: INFO nova.compute.manager [-] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Took 1.70 seconds to deallocate network for instance. [ 1586.072655] env[63379]: DEBUG oslo_vmware.api [None req-d3a459bd-7378-40a3-af6e-1dcb6f18ce40 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779499, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.097023] env[63379]: DEBUG nova.compute.manager [req-545f245a-fcc3-434e-a33d-eb314f9f2e7c req-24d74cdb-aa68-4950-8160-7e79334ede46 service nova] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Detach interface failed, port_id=d994b910-f078-4d71-a9e5-f3177a54dfef, reason: Instance aedff32b-b0c2-4a93-a2c6-349d26839cc4 could not be found. 
{{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 1586.158844] env[63379]: DEBUG nova.network.neutron [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Successfully updated port: 6a39414d-cc4f-4a85-997b-d633aec0bcef {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1586.185570] env[63379]: DEBUG oslo_vmware.api [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': task-1779496, 'name': CreateVirtualDisk_Task, 'duration_secs': 0.070266} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.186286] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Created Virtual Disk of size 1048576 KB and type thin {{(pid=63379) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1404}} [ 1586.186762] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f496f36-ec86-4e04-b83a-6071493b3f4b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.217659] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Reconfiguring VM instance instance-00000037 to attach disk [datastore1] 38be0e8d-188b-4a98-aedc-5d941b63c000/ephemeral_0.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1586.218099] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c4f720ef-a33b-4f4d-b84f-f50332bb5a60 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.238818] env[63379]: DEBUG oslo_vmware.api [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Waiting for the task: (returnval){ [ 1586.238818] env[63379]: value = "task-1779500" [ 1586.238818] env[63379]: _type = "Task" [ 1586.238818] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.248937] env[63379]: DEBUG oslo_vmware.api [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': task-1779500, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.360549] env[63379]: DEBUG oslo_vmware.api [None req-a5b924b1-b37b-41be-a210-27cdf0187536 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Task: {'id': task-1779497, 'name': ReconfigVM_Task, 'duration_secs': 0.213152} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.360549] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e1e3e91-0c59-4449-bf04-64df88bd3901 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1586.367072] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5b924b1-b37b-41be-a210-27cdf0187536 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Reconfigured VM instance instance-00000010 to detach disk 2000 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1586.373287] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c7f82d12-8cc8-4b11-a355-13dfc24bebbb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.376095] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c4b964f2-3aa3-4430-a74c-6bb610486d26 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.394706] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779498, 'name': CreateVM_Task, 'duration_secs': 0.456408} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.397110] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1586.397311] env[63379]: DEBUG oslo_vmware.api [None req-8e1e3e91-0c59-4449-bf04-64df88bd3901 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1586.397311] env[63379]: value = "task-1779501" [ 1586.397311] env[63379]: _type = "Task" [ 1586.397311] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.397701] env[63379]: DEBUG oslo_vmware.api [None req-a5b924b1-b37b-41be-a210-27cdf0187536 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Waiting for the task: (returnval){ [ 1586.397701] env[63379]: value = "task-1779502" [ 1586.397701] env[63379]: _type = "Task" [ 1586.397701] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.398244] env[63379]: DEBUG oslo_concurrency.lockutils [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1586.398407] env[63379]: DEBUG oslo_concurrency.lockutils [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1586.399128] env[63379]: DEBUG oslo_concurrency.lockutils [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1586.399128] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a204046-f5b6-469d-be1a-0706cecd3d2e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.420857] env[63379]: DEBUG oslo_vmware.api [None req-8e1e3e91-0c59-4449-bf04-64df88bd3901 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1779501, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.422987] env[63379]: DEBUG oslo_vmware.api [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1586.422987] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]529130f3-5794-73fb-8e21-ecd31ca040a7" [ 1586.422987] env[63379]: _type = "Task" [ 1586.422987] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.422987] env[63379]: DEBUG oslo_vmware.api [None req-a5b924b1-b37b-41be-a210-27cdf0187536 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Task: {'id': task-1779502, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.437988] env[63379]: DEBUG oslo_vmware.api [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]529130f3-5794-73fb-8e21-ecd31ca040a7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.489693] env[63379]: DEBUG nova.compute.manager [req-c9e23990-69ce-486f-8bfc-be4eda52a8f5 req-2aced25d-f697-4370-9a16-11d5bec5e6fa service nova] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Received event network-changed-8c5374c2-6a00-48c8-846d-94d7f695d456 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1586.489889] env[63379]: DEBUG nova.compute.manager [req-c9e23990-69ce-486f-8bfc-be4eda52a8f5 req-2aced25d-f697-4370-9a16-11d5bec5e6fa service nova] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Refreshing instance network info cache due to event network-changed-8c5374c2-6a00-48c8-846d-94d7f695d456. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1586.490066] env[63379]: DEBUG oslo_concurrency.lockutils [req-c9e23990-69ce-486f-8bfc-be4eda52a8f5 req-2aced25d-f697-4370-9a16-11d5bec5e6fa service nova] Acquiring lock "refresh_cache-ec1f7a44-7344-43fb-9d51-688731d8ce14" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1586.490221] env[63379]: DEBUG oslo_concurrency.lockutils [req-c9e23990-69ce-486f-8bfc-be4eda52a8f5 req-2aced25d-f697-4370-9a16-11d5bec5e6fa service nova] Acquired lock "refresh_cache-ec1f7a44-7344-43fb-9d51-688731d8ce14" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1586.490397] env[63379]: DEBUG nova.network.neutron [req-c9e23990-69ce-486f-8bfc-be4eda52a8f5 req-2aced25d-f697-4370-9a16-11d5bec5e6fa service nova] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Refreshing network info cache for port 8c5374c2-6a00-48c8-846d-94d7f695d456 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1586.542938] env[63379]: DEBUG nova.scheduler.client.report [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1586.559294] env[63379]: DEBUG oslo_vmware.api [None req-d3a459bd-7378-40a3-af6e-1dcb6f18ce40 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779499, 'name': ReconfigVM_Task, 'duration_secs': 0.331013} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.560721] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3a459bd-7378-40a3-af6e-1dcb6f18ce40 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Reconfigured VM instance instance-00000031 to detach disk 2001 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1586.560721] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3a459bd-7378-40a3-af6e-1dcb6f18ce40 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1586.560721] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f04037b5-3ee9-4ed8-84f1-484ab1e21972 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.568512] env[63379]: DEBUG oslo_vmware.api [None req-d3a459bd-7378-40a3-af6e-1dcb6f18ce40 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1586.568512] env[63379]: value = "task-1779503" [ 1586.568512] env[63379]: _type = "Task" [ 1586.568512] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.579457] env[63379]: DEBUG oslo_vmware.api [None req-d3a459bd-7378-40a3-af6e-1dcb6f18ce40 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779503, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.598926] env[63379]: DEBUG oslo_concurrency.lockutils [None req-48101c4a-957c-468f-97ce-d097b45d0130 tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1586.663944] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Acquiring lock "refresh_cache-2a996f06-542e-4f71-95a4-0f71097d1478" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1586.663944] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Acquired lock "refresh_cache-2a996f06-542e-4f71-95a4-0f71097d1478" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1586.663944] env[63379]: DEBUG nova.network.neutron [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1586.749765] env[63379]: DEBUG oslo_vmware.api [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': task-1779500, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.915562] env[63379]: DEBUG oslo_vmware.api [None req-a5b924b1-b37b-41be-a210-27cdf0187536 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Task: {'id': task-1779502, 'name': ReconfigVM_Task, 'duration_secs': 0.220288} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.919656] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5b924b1-b37b-41be-a210-27cdf0187536 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369228', 'volume_id': '263e34b9-3753-4240-8bd6-67c4019d79ae', 'name': 'volume-263e34b9-3753-4240-8bd6-67c4019d79ae', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '15d19ce3-ea71-47ff-a738-9ba00b8dfcf1', 'attached_at': '', 'detached_at': '', 'volume_id': '263e34b9-3753-4240-8bd6-67c4019d79ae', 'serial': '263e34b9-3753-4240-8bd6-67c4019d79ae'} {{(pid=63379) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1586.919816] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a5b924b1-b37b-41be-a210-27cdf0187536 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1586.920324] env[63379]: DEBUG oslo_vmware.api [None req-8e1e3e91-0c59-4449-bf04-64df88bd3901 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1779501, 'name': PowerOffVM_Task, 'duration_secs': 0.217642} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.921485] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7e81ce7-42db-4ea7-a5e3-95706561778d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.925051] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e1e3e91-0c59-4449-bf04-64df88bd3901 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1586.925978] env[63379]: DEBUG nova.compute.manager [None req-8e1e3e91-0c59-4449-bf04-64df88bd3901 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1586.926714] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7abf2c05-e9db-4985-8eac-32d2d9675e15 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.941101] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a5b924b1-b37b-41be-a210-27cdf0187536 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1586.946539] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f65551c5-83a6-4ed9-a516-9e58f4d02a58 {{(pid=63379) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.948600] env[63379]: DEBUG oslo_vmware.api [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]529130f3-5794-73fb-8e21-ecd31ca040a7, 'name': SearchDatastore_Task, 'duration_secs': 0.027144} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.951628] env[63379]: DEBUG oslo_concurrency.lockutils [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1586.952091] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1586.952461] env[63379]: DEBUG oslo_concurrency.lockutils [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1586.953337] env[63379]: DEBUG oslo_concurrency.lockutils [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1586.953337] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1586.954067] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-caca4b3f-d254-4b29-9fcd-f7599b790060 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.966512] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1586.966951] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1586.967761] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c312ad4-f60d-44c8-b70d-db8161a0c0f9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.974393] env[63379]: DEBUG oslo_vmware.api [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1586.974393] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]528f4158-1d96-9a09-b8f6-1f59742c9a7a" [ 1586.974393] env[63379]: _type = "Task" [ 1586.974393] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.985267] env[63379]: DEBUG oslo_vmware.api [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]528f4158-1d96-9a09-b8f6-1f59742c9a7a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.043725] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a5b924b1-b37b-41be-a210-27cdf0187536 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1587.043891] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a5b924b1-b37b-41be-a210-27cdf0187536 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1587.044157] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5b924b1-b37b-41be-a210-27cdf0187536 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Deleting the datastore file [datastore1] 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1587.044558] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6b980f2d-7648-4b16-b1c7-9866193b64a6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.048017] env[63379]: DEBUG oslo_concurrency.lockutils [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.791s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1587.048554] env[63379]: DEBUG nova.compute.manager [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1587.051237] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.729s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1587.052980] env[63379]: INFO nova.compute.claims [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1587.062379] env[63379]: DEBUG oslo_vmware.api [None req-a5b924b1-b37b-41be-a210-27cdf0187536 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Waiting for the task: (returnval){ [ 1587.062379] env[63379]: value = "task-1779505" [ 1587.062379] env[63379]: _type = "Task" [ 1587.062379] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1587.073028] env[63379]: DEBUG oslo_vmware.api [None req-a5b924b1-b37b-41be-a210-27cdf0187536 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Task: {'id': task-1779505, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.082730] env[63379]: DEBUG oslo_vmware.api [None req-d3a459bd-7378-40a3-af6e-1dcb6f18ce40 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779503, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.206093] env[63379]: DEBUG oslo_concurrency.lockutils [None req-da719b45-2989-4e67-af4a-4813f084d26d tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Acquiring lock "6e022c9a-642b-4d96-8195-e56809bbd7b9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1587.206424] env[63379]: DEBUG oslo_concurrency.lockutils [None req-da719b45-2989-4e67-af4a-4813f084d26d tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Lock "6e022c9a-642b-4d96-8195-e56809bbd7b9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1587.206741] env[63379]: DEBUG oslo_concurrency.lockutils [None req-da719b45-2989-4e67-af4a-4813f084d26d tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Acquiring lock "6e022c9a-642b-4d96-8195-e56809bbd7b9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1587.206968] env[63379]: DEBUG oslo_concurrency.lockutils [None req-da719b45-2989-4e67-af4a-4813f084d26d tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Lock "6e022c9a-642b-4d96-8195-e56809bbd7b9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1587.207184] env[63379]: DEBUG oslo_concurrency.lockutils [None req-da719b45-2989-4e67-af4a-4813f084d26d tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Lock "6e022c9a-642b-4d96-8195-e56809bbd7b9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1587.209587] env[63379]: DEBUG nova.network.neutron [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1587.213020] env[63379]: INFO nova.compute.manager [None req-da719b45-2989-4e67-af4a-4813f084d26d tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Terminating instance [ 1587.215163] env[63379]: DEBUG nova.compute.manager [None req-da719b45-2989-4e67-af4a-4813f084d26d tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1587.215380] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-da719b45-2989-4e67-af4a-4813f084d26d tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1587.216333] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc2cec09-d3e6-414a-8f33-89ea8b77994b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.231099] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-da719b45-2989-4e67-af4a-4813f084d26d tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1587.233924] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-306de0d2-4f80-48cc-9f2a-1667f8680157 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.241485] env[63379]: DEBUG oslo_vmware.api [None req-da719b45-2989-4e67-af4a-4813f084d26d tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Waiting for the task: (returnval){ [ 1587.241485] env[63379]: value = "task-1779506" [ 1587.241485] env[63379]: _type = "Task" [ 1587.241485] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1587.256109] env[63379]: DEBUG oslo_vmware.api [None req-da719b45-2989-4e67-af4a-4813f084d26d tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': task-1779506, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.259416] env[63379]: DEBUG oslo_vmware.api [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': task-1779500, 'name': ReconfigVM_Task, 'duration_secs': 0.70277} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1587.259703] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Reconfigured VM instance instance-00000037 to attach disk [datastore1] 38be0e8d-188b-4a98-aedc-5d941b63c000/ephemeral_0.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1587.260411] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6e912708-b84e-4220-870a-ba0c2df5aa50 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.265740] env[63379]: DEBUG nova.network.neutron [req-c9e23990-69ce-486f-8bfc-be4eda52a8f5 req-2aced25d-f697-4370-9a16-11d5bec5e6fa service nova] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Updated VIF entry in instance network info cache for port 8c5374c2-6a00-48c8-846d-94d7f695d456. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1587.266103] env[63379]: DEBUG nova.network.neutron [req-c9e23990-69ce-486f-8bfc-be4eda52a8f5 req-2aced25d-f697-4370-9a16-11d5bec5e6fa service nova] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Updating instance_info_cache with network_info: [{"id": "8c5374c2-6a00-48c8-846d-94d7f695d456", "address": "fa:16:3e:ab:fc:8d", "network": {"id": "37610d35-b1d3-4657-9542-9e3e955af5be", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-58653119-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6552f9956224ba5a0a01328da741242", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c5374c2-6a", "ovs_interfaceid": "8c5374c2-6a00-48c8-846d-94d7f695d456", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1587.268598] env[63379]: DEBUG oslo_vmware.api [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Waiting for the task: (returnval){ [ 1587.268598] env[63379]: value = "task-1779507" [ 1587.268598] env[63379]: _type = "Task" [ 1587.268598] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1587.279176] env[63379]: DEBUG oslo_vmware.api [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': task-1779507, 'name': Rename_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.453043] env[63379]: DEBUG nova.network.neutron [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Updating instance_info_cache with network_info: [{"id": "6a39414d-cc4f-4a85-997b-d633aec0bcef", "address": "fa:16:3e:a4:37:f2", "network": {"id": "393f09d9-160a-48f1-acdf-cf2e43117ed7", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-834514240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "50144e7fcb0642d7a1d1514f2233f555", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6a39414d-cc", "ovs_interfaceid": "6a39414d-cc4f-4a85-997b-d633aec0bcef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1587.457058] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8e1e3e91-0c59-4449-bf04-64df88bd3901 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "90f0c97d-695b-4975-8ab9-4e77a9175da1" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.622s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1587.487803] env[63379]: DEBUG oslo_vmware.api [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]528f4158-1d96-9a09-b8f6-1f59742c9a7a, 'name': SearchDatastore_Task, 'duration_secs': 0.010339} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1587.489177] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4cea4e70-5fc5-4c16-91fa-b735dd357fe4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.496520] env[63379]: DEBUG oslo_vmware.api [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1587.496520] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]521a86e0-7433-f7c1-b9b5-4f84d22f7a01" [ 1587.496520] env[63379]: _type = "Task" [ 1587.496520] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1587.509285] env[63379]: DEBUG oslo_vmware.api [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]521a86e0-7433-f7c1-b9b5-4f84d22f7a01, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.558031] env[63379]: DEBUG nova.compute.utils [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1587.563256] env[63379]: DEBUG nova.compute.manager [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Not allocating networking since 'none' was specified. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1587.574829] env[63379]: DEBUG oslo_vmware.api [None req-a5b924b1-b37b-41be-a210-27cdf0187536 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Task: {'id': task-1779505, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167848} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1587.579034] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5b924b1-b37b-41be-a210-27cdf0187536 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1587.579329] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a5b924b1-b37b-41be-a210-27cdf0187536 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1587.579440] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a5b924b1-b37b-41be-a210-27cdf0187536 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1587.579611] env[63379]: INFO nova.compute.manager [None req-a5b924b1-b37b-41be-a210-27cdf0187536 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Took 2.41 seconds to destroy the instance on the hypervisor. [ 1587.579866] env[63379]: DEBUG oslo.service.loopingcall [None req-a5b924b1-b37b-41be-a210-27cdf0187536 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1587.580358] env[63379]: DEBUG nova.compute.manager [-] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1587.580566] env[63379]: DEBUG nova.network.neutron [-] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1587.587859] env[63379]: DEBUG oslo_vmware.api [None req-d3a459bd-7378-40a3-af6e-1dcb6f18ce40 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779503, 'name': PowerOnVM_Task, 'duration_secs': 0.61257} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1587.588175] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3a459bd-7378-40a3-af6e-1dcb6f18ce40 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1587.588420] env[63379]: DEBUG nova.compute.manager [None req-d3a459bd-7378-40a3-af6e-1dcb6f18ce40 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1587.589249] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38fbc34b-70c4-4606-8872-ab6b86073c11 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.635145] env[63379]: DEBUG nova.compute.manager [req-fc17db93-fd51-4cad-a90b-74e1291d2aa1 req-964d0e05-3e97-4057-a87c-e1cf5ccb0c76 service nova] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Received event network-vif-plugged-6a39414d-cc4f-4a85-997b-d633aec0bcef {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1587.635393] env[63379]: DEBUG oslo_concurrency.lockutils [req-fc17db93-fd51-4cad-a90b-74e1291d2aa1 req-964d0e05-3e97-4057-a87c-e1cf5ccb0c76 service nova] Acquiring lock "2a996f06-542e-4f71-95a4-0f71097d1478-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1587.635763] env[63379]: DEBUG oslo_concurrency.lockutils [req-fc17db93-fd51-4cad-a90b-74e1291d2aa1 req-964d0e05-3e97-4057-a87c-e1cf5ccb0c76 service nova] Lock "2a996f06-542e-4f71-95a4-0f71097d1478-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1587.636535] env[63379]: DEBUG oslo_concurrency.lockutils [req-fc17db93-fd51-4cad-a90b-74e1291d2aa1 req-964d0e05-3e97-4057-a87c-e1cf5ccb0c76 service nova] Lock "2a996f06-542e-4f71-95a4-0f71097d1478-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1587.636771] env[63379]: 
DEBUG nova.compute.manager [req-fc17db93-fd51-4cad-a90b-74e1291d2aa1 req-964d0e05-3e97-4057-a87c-e1cf5ccb0c76 service nova] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] No waiting events found dispatching network-vif-plugged-6a39414d-cc4f-4a85-997b-d633aec0bcef {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1587.636960] env[63379]: WARNING nova.compute.manager [req-fc17db93-fd51-4cad-a90b-74e1291d2aa1 req-964d0e05-3e97-4057-a87c-e1cf5ccb0c76 service nova] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Received unexpected event network-vif-plugged-6a39414d-cc4f-4a85-997b-d633aec0bcef for instance with vm_state building and task_state spawning. [ 1587.637142] env[63379]: DEBUG nova.compute.manager [req-fc17db93-fd51-4cad-a90b-74e1291d2aa1 req-964d0e05-3e97-4057-a87c-e1cf5ccb0c76 service nova] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Received event network-changed-6a39414d-cc4f-4a85-997b-d633aec0bcef {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1587.637301] env[63379]: DEBUG nova.compute.manager [req-fc17db93-fd51-4cad-a90b-74e1291d2aa1 req-964d0e05-3e97-4057-a87c-e1cf5ccb0c76 service nova] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Refreshing instance network info cache due to event network-changed-6a39414d-cc4f-4a85-997b-d633aec0bcef. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1587.637472] env[63379]: DEBUG oslo_concurrency.lockutils [req-fc17db93-fd51-4cad-a90b-74e1291d2aa1 req-964d0e05-3e97-4057-a87c-e1cf5ccb0c76 service nova] Acquiring lock "refresh_cache-2a996f06-542e-4f71-95a4-0f71097d1478" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1587.760197] env[63379]: DEBUG oslo_vmware.api [None req-da719b45-2989-4e67-af4a-4813f084d26d tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': task-1779506, 'name': PowerOffVM_Task, 'duration_secs': 0.381972} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1587.760197] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-da719b45-2989-4e67-af4a-4813f084d26d tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1587.760197] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-da719b45-2989-4e67-af4a-4813f084d26d tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1587.760197] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-da0919be-300e-403d-97e6-bb98b1ae7a37 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.772910] env[63379]: DEBUG oslo_concurrency.lockutils [req-c9e23990-69ce-486f-8bfc-be4eda52a8f5 req-2aced25d-f697-4370-9a16-11d5bec5e6fa service nova] Releasing lock "refresh_cache-ec1f7a44-7344-43fb-9d51-688731d8ce14" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1587.772910] env[63379]: DEBUG nova.compute.manager [req-c9e23990-69ce-486f-8bfc-be4eda52a8f5 req-2aced25d-f697-4370-9a16-11d5bec5e6fa service nova] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Received event network-changed-3f2cd71e-08fb-4de9-9736-18ae2bbad0eb {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1587.772910] env[63379]: DEBUG nova.compute.manager [req-c9e23990-69ce-486f-8bfc-be4eda52a8f5 req-2aced25d-f697-4370-9a16-11d5bec5e6fa service nova] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Refreshing instance network info cache due to event network-changed-3f2cd71e-08fb-4de9-9736-18ae2bbad0eb. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1587.772910] env[63379]: DEBUG oslo_concurrency.lockutils [req-c9e23990-69ce-486f-8bfc-be4eda52a8f5 req-2aced25d-f697-4370-9a16-11d5bec5e6fa service nova] Acquiring lock "refresh_cache-6e022c9a-642b-4d96-8195-e56809bbd7b9" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1587.772910] env[63379]: DEBUG oslo_concurrency.lockutils [req-c9e23990-69ce-486f-8bfc-be4eda52a8f5 req-2aced25d-f697-4370-9a16-11d5bec5e6fa service nova] Acquired lock "refresh_cache-6e022c9a-642b-4d96-8195-e56809bbd7b9" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1587.772910] env[63379]: DEBUG nova.network.neutron [req-c9e23990-69ce-486f-8bfc-be4eda52a8f5 req-2aced25d-f697-4370-9a16-11d5bec5e6fa service nova] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Refreshing network info cache for port 3f2cd71e-08fb-4de9-9736-18ae2bbad0eb {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1587.788113] env[63379]: DEBUG oslo_vmware.api [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': task-1779507, 'name': Rename_Task, 'duration_secs': 0.343447} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1587.789224] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1587.789512] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4abb405d-7794-4501-af14-8d5e401dc7d2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.797994] env[63379]: DEBUG oslo_vmware.api [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Waiting for the task: (returnval){ [ 1587.797994] env[63379]: value = "task-1779509" [ 1587.797994] env[63379]: _type = "Task" [ 1587.797994] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1587.811412] env[63379]: DEBUG oslo_vmware.api [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': task-1779509, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.878396] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-da719b45-2989-4e67-af4a-4813f084d26d tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1587.878396] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-da719b45-2989-4e67-af4a-4813f084d26d tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1587.878396] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-da719b45-2989-4e67-af4a-4813f084d26d tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Deleting the datastore file [datastore1] 6e022c9a-642b-4d96-8195-e56809bbd7b9 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1587.878565] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f32bce3b-1819-4cea-96fd-bb76002504c6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.888601] env[63379]: DEBUG oslo_vmware.api [None req-da719b45-2989-4e67-af4a-4813f084d26d tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Waiting for the task: (returnval){ [ 1587.888601] env[63379]: value = "task-1779510" [ 1587.888601] env[63379]: _type = "Task" [ 1587.888601] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1587.899202] env[63379]: DEBUG oslo_vmware.api [None req-da719b45-2989-4e67-af4a-4813f084d26d tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': task-1779510, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.959374] env[63379]: DEBUG nova.objects.instance [None req-ce01407d-68ad-41a3-922e-aeb993f6f324 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lazy-loading 'flavor' on Instance uuid 90f0c97d-695b-4975-8ab9-4e77a9175da1 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1587.959374] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Releasing lock "refresh_cache-2a996f06-542e-4f71-95a4-0f71097d1478" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1587.959374] env[63379]: DEBUG nova.compute.manager [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Instance network_info: |[{"id": "6a39414d-cc4f-4a85-997b-d633aec0bcef", "address": "fa:16:3e:a4:37:f2", "network": {"id": "393f09d9-160a-48f1-acdf-cf2e43117ed7", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-834514240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "50144e7fcb0642d7a1d1514f2233f555", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6a39414d-cc", "ovs_interfaceid": "6a39414d-cc4f-4a85-997b-d633aec0bcef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1587.960190] env[63379]: DEBUG oslo_concurrency.lockutils [req-fc17db93-fd51-4cad-a90b-74e1291d2aa1 req-964d0e05-3e97-4057-a87c-e1cf5ccb0c76 service nova] Acquired lock "refresh_cache-2a996f06-542e-4f71-95a4-0f71097d1478" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1587.960652] env[63379]: DEBUG nova.network.neutron [req-fc17db93-fd51-4cad-a90b-74e1291d2aa1 req-964d0e05-3e97-4057-a87c-e1cf5ccb0c76 service nova] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Refreshing network info cache for port 6a39414d-cc4f-4a85-997b-d633aec0bcef {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1587.962282] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None 
req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a4:37:f2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b80dd748-3d7e-4a23-a38d-9e79a3881452', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6a39414d-cc4f-4a85-997b-d633aec0bcef', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1587.971256] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Creating folder: Project (50144e7fcb0642d7a1d1514f2233f555). Parent ref: group-v369214. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1587.973645] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1dc0503c-6c87-41d2-aadb-2ae957fa6327 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.987302] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Created folder: Project (50144e7fcb0642d7a1d1514f2233f555) in parent group-v369214. [ 1587.987651] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Creating folder: Instances. Parent ref: group-v369376. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1587.988012] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-90bf0c73-4ec6-4708-ad28-70df5bd68457 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.003082] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Created folder: Instances in parent group-v369376. [ 1588.003382] env[63379]: DEBUG oslo.service.loopingcall [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1588.003985] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1588.004228] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0c690806-b636-4423-9eb6-9773c201f41b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.027241] env[63379]: DEBUG oslo_vmware.api [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]521a86e0-7433-f7c1-b9b5-4f84d22f7a01, 'name': SearchDatastore_Task, 'duration_secs': 0.01415} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.028126] env[63379]: DEBUG oslo_concurrency.lockutils [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1588.028404] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] ec1f7a44-7344-43fb-9d51-688731d8ce14/ec1f7a44-7344-43fb-9d51-688731d8ce14.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1588.028688] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bec979c7-fe73-4c91-bac2-d404497b6d16 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.034132] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1588.034132] env[63379]: value = "task-1779513" [ 1588.034132] env[63379]: _type = "Task" [ 1588.034132] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1588.038808] env[63379]: DEBUG oslo_vmware.api [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1588.038808] env[63379]: value = "task-1779514" [ 1588.038808] env[63379]: _type = "Task" [ 1588.038808] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1588.048663] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779513, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.052741] env[63379]: DEBUG oslo_vmware.api [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779514, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.064026] env[63379]: DEBUG nova.compute.manager [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1588.315524] env[63379]: DEBUG oslo_vmware.api [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': task-1779509, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.403167] env[63379]: DEBUG oslo_vmware.api [None req-da719b45-2989-4e67-af4a-4813f084d26d tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': task-1779510, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.247555} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.403167] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-da719b45-2989-4e67-af4a-4813f084d26d tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1588.403167] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-da719b45-2989-4e67-af4a-4813f084d26d tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1588.403167] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-da719b45-2989-4e67-af4a-4813f084d26d tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1588.403167] env[63379]: INFO nova.compute.manager [None req-da719b45-2989-4e67-af4a-4813f084d26d tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1588.403167] env[63379]: DEBUG oslo.service.loopingcall [None req-da719b45-2989-4e67-af4a-4813f084d26d tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1588.403580] env[63379]: DEBUG nova.compute.manager [-] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1588.403580] env[63379]: DEBUG nova.network.neutron [-] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1588.478470] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ce01407d-68ad-41a3-922e-aeb993f6f324 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "refresh_cache-90f0c97d-695b-4975-8ab9-4e77a9175da1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1588.478714] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ce01407d-68ad-41a3-922e-aeb993f6f324 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquired lock "refresh_cache-90f0c97d-695b-4975-8ab9-4e77a9175da1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1588.478817] env[63379]: DEBUG nova.network.neutron [None req-ce01407d-68ad-41a3-922e-aeb993f6f324 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1588.479019] env[63379]: DEBUG nova.objects.instance [None req-ce01407d-68ad-41a3-922e-aeb993f6f324 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lazy-loading 'info_cache' on Instance uuid 90f0c97d-695b-4975-8ab9-4e77a9175da1 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1588.546136] env[63379]: DEBUG nova.compute.manager [req-1d5f191b-1838-477f-b7f2-3e95d95e9d27 req-0fb37ef7-2b8a-4e94-a3dd-58c79f2a2468 service nova] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Received event network-vif-deleted-6b70ec9a-65bb-4a1c-9312-97031fc4fc46 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1588.546346] env[63379]: INFO nova.compute.manager [req-1d5f191b-1838-477f-b7f2-3e95d95e9d27 req-0fb37ef7-2b8a-4e94-a3dd-58c79f2a2468 service nova] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Neutron deleted interface 6b70ec9a-65bb-4a1c-9312-97031fc4fc46; detaching it from the instance and deleting it from the info cache [ 1588.546521] env[63379]: DEBUG nova.network.neutron [req-1d5f191b-1838-477f-b7f2-3e95d95e9d27 req-0fb37ef7-2b8a-4e94-a3dd-58c79f2a2468 service nova] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1588.565213] env[63379]: DEBUG oslo_vmware.api [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779514, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.565508] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779513, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.639664] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32e1aeed-5c96-4bf9-bef3-b2fe9af23bec {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.658344] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f175c68c-a467-493e-885a-0f2294c2229f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.694564] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-481b0e00-db15-43c6-b901-9fef5b478f25 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.704530] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0689cee5-92a5-40d6-b66d-583f2a1963f9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.727281] env[63379]: DEBUG nova.network.neutron [-] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1588.732020] env[63379]: DEBUG nova.compute.provider_tree [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1588.732350] env[63379]: DEBUG nova.network.neutron [req-c9e23990-69ce-486f-8bfc-be4eda52a8f5 req-2aced25d-f697-4370-9a16-11d5bec5e6fa service nova] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Updated VIF entry in instance network info cache for port 3f2cd71e-08fb-4de9-9736-18ae2bbad0eb. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1588.732726] env[63379]: DEBUG nova.network.neutron [req-c9e23990-69ce-486f-8bfc-be4eda52a8f5 req-2aced25d-f697-4370-9a16-11d5bec5e6fa service nova] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Updating instance_info_cache with network_info: [{"id": "3f2cd71e-08fb-4de9-9736-18ae2bbad0eb", "address": "fa:16:3e:7a:aa:83", "network": {"id": "8f3138b9-b170-40da-aa17-d0938c48221d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2072680575-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "696eed8e898e4ffd831805df17a93d27", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f2cd71e-08", "ovs_interfaceid": "3f2cd71e-08fb-4de9-9736-18ae2bbad0eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1588.814444] env[63379]: DEBUG oslo_vmware.api [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': task-1779509, 'name': PowerOnVM_Task, 'duration_secs': 0.816991} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.817386] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1588.817386] env[63379]: INFO nova.compute.manager [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Took 11.49 seconds to spawn the instance on the hypervisor. 
[ 1588.817386] env[63379]: DEBUG nova.compute.manager [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1588.817386] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11b653fa-4db8-43ad-ba0f-966315ef45ad {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.983350] env[63379]: DEBUG nova.objects.base [None req-ce01407d-68ad-41a3-922e-aeb993f6f324 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Object Instance<90f0c97d-695b-4975-8ab9-4e77a9175da1> lazy-loaded attributes: flavor,info_cache {{(pid=63379) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1589.047508] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779513, 'name': CreateVM_Task, 'duration_secs': 0.746074} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1589.047508] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1589.047897] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1589.048085] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1589.048404] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1589.048723] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f0f348b-b472-4d31-87f4-4a9d58bc4c2d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.058900] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b1579ef7-4c78-4dd2-b4a7-f3cd27415513 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.065061] env[63379]: DEBUG oslo_vmware.api [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Waiting for the task: (returnval){ [ 1589.065061] 
env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d8975b-ac9f-f20b-6e7a-21a847c614e7" [ 1589.065061] env[63379]: _type = "Task" [ 1589.065061] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.069794] env[63379]: DEBUG oslo_vmware.api [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779514, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.080999] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ee024eb-534f-4608-8d9d-3a8df01f9d81 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.095580] env[63379]: DEBUG nova.compute.manager [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1589.097560] env[63379]: DEBUG oslo_vmware.api [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d8975b-ac9f-f20b-6e7a-21a847c614e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.179690] env[63379]: DEBUG nova.compute.manager [req-1d5f191b-1838-477f-b7f2-3e95d95e9d27 req-0fb37ef7-2b8a-4e94-a3dd-58c79f2a2468 service nova] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Detach interface failed, port_id=6b70ec9a-65bb-4a1c-9312-97031fc4fc46, reason: Instance 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1 could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 1589.179690] env[63379]: DEBUG nova.network.neutron [req-fc17db93-fd51-4cad-a90b-74e1291d2aa1 req-964d0e05-3e97-4057-a87c-e1cf5ccb0c76 service nova] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Updated VIF entry in instance network info cache for port 6a39414d-cc4f-4a85-997b-d633aec0bcef. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1589.179690] env[63379]: DEBUG nova.network.neutron [req-fc17db93-fd51-4cad-a90b-74e1291d2aa1 req-964d0e05-3e97-4057-a87c-e1cf5ccb0c76 service nova] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Updating instance_info_cache with network_info: [{"id": "6a39414d-cc4f-4a85-997b-d633aec0bcef", "address": "fa:16:3e:a4:37:f2", "network": {"id": "393f09d9-160a-48f1-acdf-cf2e43117ed7", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-834514240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "50144e7fcb0642d7a1d1514f2233f555", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6a39414d-cc", "ovs_interfaceid": "6a39414d-cc4f-4a85-997b-d633aec0bcef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1589.179690] env[63379]: DEBUG nova.virt.hardware [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1589.179690] env[63379]: DEBUG nova.virt.hardware [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1589.179690] env[63379]: DEBUG nova.virt.hardware [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1589.179690] env[63379]: DEBUG nova.virt.hardware [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 1589.179690] env[63379]: DEBUG nova.virt.hardware [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1589.179690] env[63379]: DEBUG nova.virt.hardware [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1589.179690] env[63379]: DEBUG nova.virt.hardware [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1589.179690] env[63379]: DEBUG nova.virt.hardware [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1589.180613] env[63379]: DEBUG nova.virt.hardware [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1589.180613] env[63379]: DEBUG nova.virt.hardware [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1589.180613] env[63379]: DEBUG nova.virt.hardware [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1589.180613] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4871c4d6-6c51-40f3-a6b3-c9415425d69c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.180613] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80c2eec9-3af2-4588-9114-2ea650ef0a73 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.188691] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Instance VIF info [] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1589.194490] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Creating folder: Project 
(3592040054864de4a13928fd22c193bc). Parent ref: group-v369214. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1589.195115] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e910a651-21c8-422e-bfd3-7cb6402c724f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.209794] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Created folder: Project (3592040054864de4a13928fd22c193bc) in parent group-v369214. [ 1589.210034] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Creating folder: Instances. Parent ref: group-v369379. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1589.210294] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-be64ac78-5a94-4364-929f-728af64bcc1b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.222671] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Created folder: Instances in parent group-v369379. [ 1589.222940] env[63379]: DEBUG oslo.service.loopingcall [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1589.223160] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1589.223380] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f212d1ff-fede-4bbc-8e8f-2966e044eec6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.237289] env[63379]: INFO nova.compute.manager [-] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Took 1.66 seconds to deallocate network for instance. 
[ 1589.238017] env[63379]: DEBUG nova.scheduler.client.report [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1589.241385] env[63379]: DEBUG oslo_concurrency.lockutils [req-c9e23990-69ce-486f-8bfc-be4eda52a8f5 req-2aced25d-f697-4370-9a16-11d5bec5e6fa service nova] Releasing lock "refresh_cache-6e022c9a-642b-4d96-8195-e56809bbd7b9" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1589.251258] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1589.251258] env[63379]: value = "task-1779517" [ 1589.251258] env[63379]: _type = "Task" [ 1589.251258] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.262038] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779517, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.335156] env[63379]: INFO nova.compute.manager [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Took 40.58 seconds to build instance. [ 1589.561901] env[63379]: DEBUG oslo_vmware.api [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779514, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.583191] env[63379]: DEBUG oslo_vmware.api [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d8975b-ac9f-f20b-6e7a-21a847c614e7, 'name': SearchDatastore_Task, 'duration_secs': 0.020626} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1589.583826] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1589.584131] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1589.584577] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1589.584810] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1589.585060] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1589.585372] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7a059618-b34d-4a95-b828-76b3ab154361 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.595888] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1589.596139] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1589.596948] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1bcc6050-14a6-4afd-97cf-8d8fc8ed7e34 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.606021] env[63379]: DEBUG oslo_vmware.api [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Waiting for the task: (returnval){ [ 1589.606021] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d7ff43-64b2-6f52-288f-6875795f5741" [ 1589.606021] env[63379]: _type = "Task" [ 1589.606021] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.617343] env[63379]: DEBUG oslo_vmware.api [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d7ff43-64b2-6f52-288f-6875795f5741, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.647951] env[63379]: DEBUG oslo_concurrency.lockutils [req-fc17db93-fd51-4cad-a90b-74e1291d2aa1 req-964d0e05-3e97-4057-a87c-e1cf5ccb0c76 service nova] Releasing lock "refresh_cache-2a996f06-542e-4f71-95a4-0f71097d1478" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1589.648379] env[63379]: DEBUG nova.network.neutron [-] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1589.680307] env[63379]: DEBUG nova.compute.manager [req-28cf0fbf-d95a-4ed0-b857-22fd4dcf8771 req-449db6a3-805d-4b0e-83ae-df77fd414882 service nova] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Received event network-vif-deleted-3f2cd71e-08fb-4de9-9736-18ae2bbad0eb {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1589.747430] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.696s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1589.748500] env[63379]: DEBUG nova.compute.manager [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1589.750523] env[63379]: DEBUG oslo_concurrency.lockutils [None req-50799629-93a0-435f-bfaa-33fa45c0f1d3 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.934s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1589.754056] env[63379]: DEBUG nova.objects.instance [None req-50799629-93a0-435f-bfaa-33fa45c0f1d3 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Lazy-loading 'resources' on Instance uuid d2f5b406-3d0e-4150-aeaf-7cdacbc12c06 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1589.772451] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779517, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.812458] env[63379]: INFO nova.compute.manager [None req-a5b924b1-b37b-41be-a210-27cdf0187536 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Took 0.57 seconds to detach 1 volumes for instance. [ 1589.816148] env[63379]: DEBUG nova.compute.manager [None req-a5b924b1-b37b-41be-a210-27cdf0187536 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Deleting volume: 263e34b9-3753-4240-8bd6-67c4019d79ae {{(pid=63379) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3248}} [ 1589.836988] env[63379]: DEBUG oslo_concurrency.lockutils [None req-db3855c9-7b3e-40ae-b3e7-22e9c9c625e0 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Lock "38be0e8d-188b-4a98-aedc-5d941b63c000" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 75.620s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1590.060915] env[63379]: DEBUG oslo_vmware.api [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779514, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.72323} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1590.061636] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] ec1f7a44-7344-43fb-9d51-688731d8ce14/ec1f7a44-7344-43fb-9d51-688731d8ce14.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1590.061883] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1590.062173] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0d40fca5-9612-4a68-a94a-444ac3ababb0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.069581] env[63379]: DEBUG oslo_vmware.api [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1590.069581] env[63379]: value = "task-1779519" [ 1590.069581] env[63379]: _type = "Task" [ 1590.069581] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1590.078751] env[63379]: DEBUG oslo_vmware.api [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779519, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.118068] env[63379]: DEBUG oslo_vmware.api [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d7ff43-64b2-6f52-288f-6875795f5741, 'name': SearchDatastore_Task, 'duration_secs': 0.009676} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1590.118719] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d073278-806c-4575-808f-5c2bb52ddb4c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.125057] env[63379]: DEBUG oslo_vmware.api [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Waiting for the task: (returnval){ [ 1590.125057] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5214aa7d-1e06-3c80-1f19-bbfcbe93fdeb" [ 1590.125057] env[63379]: _type = "Task" [ 1590.125057] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1590.133663] env[63379]: DEBUG oslo_vmware.api [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5214aa7d-1e06-3c80-1f19-bbfcbe93fdeb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.151345] env[63379]: INFO nova.compute.manager [-] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Took 1.75 seconds to deallocate network for instance. [ 1590.166019] env[63379]: DEBUG nova.network.neutron [None req-ce01407d-68ad-41a3-922e-aeb993f6f324 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Updating instance_info_cache with network_info: [{"id": "ef820562-0de4-462d-a51d-13e4a4929719", "address": "fa:16:3e:eb:5b:7f", "network": {"id": "c67e6fb1-ba3e-4494-b459-ecd555f3bf64", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1864563188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.212", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c01c5c8c3734c4ea066324e542e7374", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6934071-bf85-4591-9c7d-55c7ea131262", "external-id": "nsx-vlan-transportzone-452", "segmentation_id": 452, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef820562-0d", "ovs_interfaceid": "ef820562-0de4-462d-a51d-13e4a4929719", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1590.257907] env[63379]: DEBUG nova.compute.utils [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1590.263778] env[63379]: DEBUG nova.compute.manager [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1590.263778] env[63379]: DEBUG nova.network.neutron [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1590.280274] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779517, 'name': CreateVM_Task, 'duration_secs': 0.712993} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1590.280274] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1590.280650] env[63379]: DEBUG oslo_concurrency.lockutils [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1590.280846] env[63379]: DEBUG oslo_concurrency.lockutils [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1590.281188] env[63379]: DEBUG oslo_concurrency.lockutils [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1590.281506] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0cae22c2-3353-4117-be37-64bdd47c0252 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.287387] env[63379]: DEBUG oslo_vmware.api [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Waiting for the task: (returnval){ [ 1590.287387] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52173ecc-2475-51ee-17f4-41c3cc919018" [ 1590.287387] env[63379]: _type = "Task" [ 1590.287387] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1590.305301] env[63379]: DEBUG oslo_vmware.api [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52173ecc-2475-51ee-17f4-41c3cc919018, 'name': SearchDatastore_Task, 'duration_secs': 0.010975} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1590.305860] env[63379]: DEBUG oslo_concurrency.lockutils [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1590.306086] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1590.306314] env[63379]: DEBUG oslo_concurrency.lockutils [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1590.333601] env[63379]: DEBUG nova.policy [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '991a93509b8943a693859488a56352b3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '928a9d102f0e45b897eae72fa566c0fe', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1590.359174] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a5b924b1-b37b-41be-a210-27cdf0187536 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1590.583572] env[63379]: DEBUG oslo_vmware.api [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779519, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073017} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1590.589059] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1590.590956] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bd2ba11-6c20-498b-8a67-b8ee30755824 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.619331] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] ec1f7a44-7344-43fb-9d51-688731d8ce14/ec1f7a44-7344-43fb-9d51-688731d8ce14.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1590.623639] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6613f3d0-633a-46f8-8e8d-2285766baa4b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.639484] env[63379]: DEBUG nova.compute.manager [req-ebbe9b37-a7ae-465d-9540-0695eb02e452 req-832c7d2f-e58f-4ce7-a1e2-9cc2a1e7f50e service nova] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Received event network-changed-a8926575-6550-43c6-b23d-a15787ee76c0 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1590.639844] env[63379]: DEBUG nova.compute.manager [req-ebbe9b37-a7ae-465d-9540-0695eb02e452 req-832c7d2f-e58f-4ce7-a1e2-9cc2a1e7f50e service nova] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Refreshing instance network info cache due to event network-changed-a8926575-6550-43c6-b23d-a15787ee76c0. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1590.639921] env[63379]: DEBUG oslo_concurrency.lockutils [req-ebbe9b37-a7ae-465d-9540-0695eb02e452 req-832c7d2f-e58f-4ce7-a1e2-9cc2a1e7f50e service nova] Acquiring lock "refresh_cache-38be0e8d-188b-4a98-aedc-5d941b63c000" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1590.640045] env[63379]: DEBUG oslo_concurrency.lockutils [req-ebbe9b37-a7ae-465d-9540-0695eb02e452 req-832c7d2f-e58f-4ce7-a1e2-9cc2a1e7f50e service nova] Acquired lock "refresh_cache-38be0e8d-188b-4a98-aedc-5d941b63c000" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1590.640213] env[63379]: DEBUG nova.network.neutron [req-ebbe9b37-a7ae-465d-9540-0695eb02e452 req-832c7d2f-e58f-4ce7-a1e2-9cc2a1e7f50e service nova] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Refreshing network info cache for port a8926575-6550-43c6-b23d-a15787ee76c0 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1590.652797] env[63379]: DEBUG oslo_vmware.api [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5214aa7d-1e06-3c80-1f19-bbfcbe93fdeb, 'name': SearchDatastore_Task, 'duration_secs': 0.010811} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1590.654408] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1590.654711] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 2a996f06-542e-4f71-95a4-0f71097d1478/2a996f06-542e-4f71-95a4-0f71097d1478.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1590.655149] env[63379]: DEBUG oslo_vmware.api [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1590.655149] env[63379]: value = "task-1779520" [ 1590.655149] env[63379]: _type = "Task" [ 1590.655149] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1590.655394] env[63379]: DEBUG oslo_concurrency.lockutils [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1590.655701] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1590.655947] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-11cc23e1-3fc4-4e80-848a-e7ef63966788 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.658935] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-deae9d43-79f1-475a-93d9-16ece7c5aa69 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.661723] env[63379]: DEBUG oslo_concurrency.lockutils [None req-da719b45-2989-4e67-af4a-4813f084d26d tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1590.673488] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ce01407d-68ad-41a3-922e-aeb993f6f324 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Releasing lock "refresh_cache-90f0c97d-695b-4975-8ab9-4e77a9175da1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1590.674036] env[63379]: DEBUG oslo_vmware.api [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Waiting for the task: (returnval){ [ 1590.674036] env[63379]: value = "task-1779521" [ 1590.674036] env[63379]: _type = "Task" [ 1590.674036] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1590.678865] env[63379]: DEBUG oslo_vmware.api [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779520, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.683243] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1590.683243] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1590.684091] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d88cd5e-baa6-4250-b06c-dc248aeb3b70 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.693162] env[63379]: DEBUG oslo_vmware.api [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779521, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.697409] env[63379]: DEBUG oslo_vmware.api [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Waiting for the task: (returnval){ [ 1590.697409] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5233e11b-617b-b44c-0d2f-4e8164525b7b" [ 1590.697409] env[63379]: _type = "Task" [ 1590.697409] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1590.708832] env[63379]: DEBUG oslo_vmware.api [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5233e11b-617b-b44c-0d2f-4e8164525b7b, 'name': SearchDatastore_Task, 'duration_secs': 0.01141} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1590.712720] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50db42e1-2c15-4703-9470-e6c29761fc7d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.718622] env[63379]: DEBUG oslo_vmware.api [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Waiting for the task: (returnval){ [ 1590.718622] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a3a61b-f18a-278e-878d-8327fea43589" [ 1590.718622] env[63379]: _type = "Task" [ 1590.718622] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1590.732100] env[63379]: DEBUG oslo_vmware.api [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a3a61b-f18a-278e-878d-8327fea43589, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.763560] env[63379]: DEBUG nova.compute.manager [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1590.861670] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fe08466-8791-4ea9-89e3-4457eaace628 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.871987] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bcad843-10ec-4486-ada5-d4c30087561d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.876449] env[63379]: DEBUG nova.network.neutron [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Successfully created port: 04dc0f41-2a3f-4db5-8cfd-7c1709ad3e82 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1590.909702] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44cdfffd-e8d3-4825-aade-c0b127e57432 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.920242] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a861c9bb-928a-4e69-8d26-729d4a41e820 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.937349] env[63379]: DEBUG nova.compute.provider_tree [None req-50799629-93a0-435f-bfaa-33fa45c0f1d3 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1591.177467] env[63379]: DEBUG oslo_vmware.api [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779520, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.186669] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce01407d-68ad-41a3-922e-aeb993f6f324 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1591.187088] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-846f2bb1-6191-4d6c-881e-00fa494899c7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.196374] env[63379]: DEBUG oslo_vmware.api [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779521, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.202262] env[63379]: DEBUG oslo_vmware.api [None req-ce01407d-68ad-41a3-922e-aeb993f6f324 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1591.202262] env[63379]: value = "task-1779522" [ 1591.202262] env[63379]: _type = "Task" [ 1591.202262] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1591.214499] env[63379]: DEBUG oslo_vmware.api [None req-ce01407d-68ad-41a3-922e-aeb993f6f324 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1779522, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.228124] env[63379]: DEBUG oslo_vmware.api [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a3a61b-f18a-278e-878d-8327fea43589, 'name': SearchDatastore_Task, 'duration_secs': 0.013911} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1591.228124] env[63379]: DEBUG oslo_concurrency.lockutils [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1591.228431] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] ac596f08-86a3-42e0-86e6-41a173fe868f/ac596f08-86a3-42e0-86e6-41a173fe868f.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1591.229076] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-de0cdcba-cafd-4418-bbb0-46777bdd288e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.244538] env[63379]: DEBUG oslo_vmware.api [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Waiting for the task: (returnval){ [ 1591.244538] env[63379]: value = "task-1779523" [ 1591.244538] env[63379]: _type = "Task" [ 1591.244538] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1591.266022] env[63379]: DEBUG oslo_vmware.api [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Task: {'id': task-1779523, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.440940] env[63379]: DEBUG nova.scheduler.client.report [None req-50799629-93a0-435f-bfaa-33fa45c0f1d3 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1591.529298] env[63379]: DEBUG nova.network.neutron [req-ebbe9b37-a7ae-465d-9540-0695eb02e452 req-832c7d2f-e58f-4ce7-a1e2-9cc2a1e7f50e service nova] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Updated VIF entry in instance network info cache for port a8926575-6550-43c6-b23d-a15787ee76c0. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1591.529713] env[63379]: DEBUG nova.network.neutron [req-ebbe9b37-a7ae-465d-9540-0695eb02e452 req-832c7d2f-e58f-4ce7-a1e2-9cc2a1e7f50e service nova] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Updating instance_info_cache with network_info: [{"id": "a8926575-6550-43c6-b23d-a15787ee76c0", "address": "fa:16:3e:c2:d2:b4", "network": {"id": "b8a1048f-18ff-4dd7-a19e-5d58874f3f5d", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1410192054-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.190", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea2c1f9216ee4d8e8349a27de543c2d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaba65c3-6925-4c7f-83b6-17cd1a328e27", "external-id": "nsx-vlan-transportzone-202", "segmentation_id": 202, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8926575-65", "ovs_interfaceid": "a8926575-6550-43c6-b23d-a15787ee76c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1591.670937] env[63379]: DEBUG oslo_vmware.api [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779520, 'name': ReconfigVM_Task, 'duration_secs': 0.597852} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1591.671283] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Reconfigured VM instance instance-00000038 to attach disk [datastore1] ec1f7a44-7344-43fb-9d51-688731d8ce14/ec1f7a44-7344-43fb-9d51-688731d8ce14.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1591.671871] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-728a9bc2-761d-4b0a-8762-dff833965e52 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.685156] env[63379]: DEBUG oslo_vmware.api [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1591.685156] env[63379]: value = "task-1779524" [ 1591.685156] env[63379]: _type = "Task" [ 1591.685156] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1591.692486] env[63379]: DEBUG oslo_vmware.api [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779521, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.539676} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1591.693077] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 2a996f06-542e-4f71-95a4-0f71097d1478/2a996f06-542e-4f71-95a4-0f71097d1478.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1591.693284] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1591.693611] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b1144acc-e091-4c56-bb9a-ec27bcb18d91 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.699021] env[63379]: DEBUG oslo_vmware.api [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779524, 'name': Rename_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.707206] env[63379]: DEBUG oslo_vmware.api [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Waiting for the task: (returnval){ [ 1591.707206] env[63379]: value = "task-1779525" [ 1591.707206] env[63379]: _type = "Task" [ 1591.707206] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1591.715661] env[63379]: DEBUG oslo_vmware.api [None req-ce01407d-68ad-41a3-922e-aeb993f6f324 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1779522, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.722916] env[63379]: DEBUG oslo_vmware.api [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779525, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.762184] env[63379]: DEBUG oslo_vmware.api [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Task: {'id': task-1779523, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.458347} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1591.762838] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] ac596f08-86a3-42e0-86e6-41a173fe868f/ac596f08-86a3-42e0-86e6-41a173fe868f.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1591.762921] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1591.763264] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f1adb9db-47ca-44a3-ae56-9752f8647065 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.777562] env[63379]: DEBUG nova.compute.manager [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1591.779916] env[63379]: DEBUG oslo_vmware.api [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Waiting for the task: (returnval){ [ 1591.779916] env[63379]: value = "task-1779526" [ 1591.779916] env[63379]: _type = "Task" [ 1591.779916] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1591.789530] env[63379]: DEBUG oslo_vmware.api [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Task: {'id': task-1779526, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.823612] env[63379]: DEBUG nova.virt.hardware [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1591.824194] env[63379]: DEBUG nova.virt.hardware [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1591.824194] env[63379]: DEBUG nova.virt.hardware [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1591.824502] env[63379]: DEBUG nova.virt.hardware [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1591.824698] env[63379]: DEBUG nova.virt.hardware [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1591.825054] env[63379]: DEBUG nova.virt.hardware [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1591.825114] env[63379]: DEBUG nova.virt.hardware [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1591.825299] env[63379]: DEBUG nova.virt.hardware [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1591.825493] env[63379]: DEBUG 
nova.virt.hardware [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1591.826339] env[63379]: DEBUG nova.virt.hardware [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1591.826339] env[63379]: DEBUG nova.virt.hardware [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1591.826821] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d178bff9-87ac-4861-af6c-4b790000ab7b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.836671] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b525961d-c38b-41b7-ab58-75288d298562 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.946234] env[63379]: DEBUG oslo_concurrency.lockutils [None req-50799629-93a0-435f-bfaa-33fa45c0f1d3 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.196s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1591.952028] env[63379]: DEBUG oslo_concurrency.lockutils [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 21.075s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1591.985039] env[63379]: INFO nova.scheduler.client.report [None req-50799629-93a0-435f-bfaa-33fa45c0f1d3 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Deleted allocations for instance d2f5b406-3d0e-4150-aeaf-7cdacbc12c06 [ 1592.032718] env[63379]: DEBUG oslo_concurrency.lockutils [req-ebbe9b37-a7ae-465d-9540-0695eb02e452 req-832c7d2f-e58f-4ce7-a1e2-9cc2a1e7f50e service nova] Releasing lock "refresh_cache-38be0e8d-188b-4a98-aedc-5d941b63c000" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1592.200336] env[63379]: DEBUG oslo_vmware.api [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779524, 'name': Rename_Task, 'duration_secs': 0.24112} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1592.200395] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1592.200669] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-82b50fa4-9420-4833-9ce9-69169ca4b15a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.208780] env[63379]: DEBUG oslo_vmware.api [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1592.208780] env[63379]: value = "task-1779527" [ 1592.208780] env[63379]: _type = "Task" [ 1592.208780] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1592.218609] env[63379]: DEBUG oslo_vmware.api [None req-ce01407d-68ad-41a3-922e-aeb993f6f324 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1779522, 'name': PowerOnVM_Task, 'duration_secs': 0.569984} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1592.218609] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce01407d-68ad-41a3-922e-aeb993f6f324 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1592.218609] env[63379]: DEBUG nova.compute.manager [None req-ce01407d-68ad-41a3-922e-aeb993f6f324 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1592.220438] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74a9a157-5303-40b3-9a61-aa130520c700 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.227886] env[63379]: DEBUG oslo_vmware.api [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779527, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1592.227977] env[63379]: DEBUG oslo_vmware.api [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779525, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074926} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1592.228897] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1592.229431] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8744362b-901c-4a34-9ccc-1ced1f5f17a6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.262611] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Reconfiguring VM instance instance-00000039 to attach disk [datastore1] 2a996f06-542e-4f71-95a4-0f71097d1478/2a996f06-542e-4f71-95a4-0f71097d1478.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1592.262611] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ce5ce3e6-bf85-4551-b44c-b01503763223 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.286135] env[63379]: DEBUG oslo_vmware.api [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Waiting for the task: (returnval){ [ 1592.286135] env[63379]: value = "task-1779528" [ 1592.286135] env[63379]: _type = "Task" [ 1592.286135] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1592.293089] env[63379]: DEBUG oslo_vmware.api [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Task: {'id': task-1779526, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.089336} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1592.293769] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1592.294569] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-547f4780-d6d4-4927-a5ca-8157d9da109d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.303829] env[63379]: DEBUG oslo_vmware.api [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779528, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1592.322054] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] ac596f08-86a3-42e0-86e6-41a173fe868f/ac596f08-86a3-42e0-86e6-41a173fe868f.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1592.323685] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d47cc179-3599-4276-b322-2def82b1e8c1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.343448] env[63379]: DEBUG oslo_vmware.api [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Waiting for the task: (returnval){ [ 1592.343448] env[63379]: value = "task-1779529" [ 1592.343448] env[63379]: _type = "Task" [ 1592.343448] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1592.352849] env[63379]: DEBUG oslo_vmware.api [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Task: {'id': task-1779529, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1592.454385] env[63379]: INFO nova.compute.claims [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1592.498859] env[63379]: DEBUG oslo_concurrency.lockutils [None req-50799629-93a0-435f-bfaa-33fa45c0f1d3 tempest-ServersTestManualDisk-1813588105 tempest-ServersTestManualDisk-1813588105-project-member] Lock "d2f5b406-3d0e-4150-aeaf-7cdacbc12c06" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.555s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1592.699832] env[63379]: DEBUG nova.network.neutron [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Successfully updated port: 04dc0f41-2a3f-4db5-8cfd-7c1709ad3e82 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1592.714789] env[63379]: DEBUG nova.compute.manager [req-9141a4b4-4b6b-424b-a217-3f27cffa35e2 req-72364284-32b8-45fe-bc7a-f657f2dc37b6 service nova] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Received event network-vif-plugged-04dc0f41-2a3f-4db5-8cfd-7c1709ad3e82 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1592.715014] env[63379]: DEBUG oslo_concurrency.lockutils [req-9141a4b4-4b6b-424b-a217-3f27cffa35e2 req-72364284-32b8-45fe-bc7a-f657f2dc37b6 service nova] Acquiring lock "607f9774-0ffc-4ece-a7ba-419fdf6eb26b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1592.715235] env[63379]: DEBUG oslo_concurrency.lockutils [req-9141a4b4-4b6b-424b-a217-3f27cffa35e2 req-72364284-32b8-45fe-bc7a-f657f2dc37b6 service nova] Lock "607f9774-0ffc-4ece-a7ba-419fdf6eb26b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1592.715469] env[63379]: DEBUG oslo_concurrency.lockutils [req-9141a4b4-4b6b-424b-a217-3f27cffa35e2 req-72364284-32b8-45fe-bc7a-f657f2dc37b6 service nova] Lock "607f9774-0ffc-4ece-a7ba-419fdf6eb26b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1592.715631] env[63379]: DEBUG nova.compute.manager [req-9141a4b4-4b6b-424b-a217-3f27cffa35e2 req-72364284-32b8-45fe-bc7a-f657f2dc37b6 service nova] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] No waiting events found dispatching network-vif-plugged-04dc0f41-2a3f-4db5-8cfd-7c1709ad3e82 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1592.716159] env[63379]: WARNING nova.compute.manager [req-9141a4b4-4b6b-424b-a217-3f27cffa35e2 req-72364284-32b8-45fe-bc7a-f657f2dc37b6 service nova] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Received unexpected event network-vif-plugged-04dc0f41-2a3f-4db5-8cfd-7c1709ad3e82 for instance with vm_state building and task_state spawning. [ 1592.729132] env[63379]: DEBUG oslo_vmware.api [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779527, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1592.799067] env[63379]: DEBUG oslo_vmware.api [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779528, 'name': ReconfigVM_Task, 'duration_secs': 0.294966} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1592.799264] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Reconfigured VM instance instance-00000039 to attach disk [datastore1] 2a996f06-542e-4f71-95a4-0f71097d1478/2a996f06-542e-4f71-95a4-0f71097d1478.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1592.799887] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b1abaf37-082f-4ce0-8944-79b312dd3ae4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.808393] env[63379]: DEBUG oslo_vmware.api [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Waiting for the task: (returnval){ [ 1592.808393] env[63379]: value = "task-1779530" [ 1592.808393] env[63379]: _type = "Task" [ 1592.808393] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1592.817769] env[63379]: DEBUG oslo_vmware.api [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779530, 'name': Rename_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1592.857104] env[63379]: DEBUG oslo_vmware.api [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Task: {'id': task-1779529, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1592.961068] env[63379]: INFO nova.compute.resource_tracker [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Updating resource usage from migration 1d862343-7285-48b6-8ba8-374b0de20e47 [ 1593.168555] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquiring lock "19a41941-0679-4971-8a44-c95b13f5c294" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1593.168825] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Lock "19a41941-0679-4971-8a44-c95b13f5c294" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1593.169894] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Acquiring lock "5c4ae6c6-538a-4724-ad77-340d9c60c24a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1593.171354] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Lock "5c4ae6c6-538a-4724-ad77-340d9c60c24a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1593.202624] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "refresh_cache-607f9774-0ffc-4ece-a7ba-419fdf6eb26b" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1593.204653] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquired lock "refresh_cache-607f9774-0ffc-4ece-a7ba-419fdf6eb26b" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1593.204866] env[63379]: DEBUG nova.network.neutron [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1593.228529] env[63379]: DEBUG oslo_vmware.api [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 
tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779527, 'name': PowerOnVM_Task, 'duration_secs': 0.94279} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1593.229087] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1593.229507] env[63379]: INFO nova.compute.manager [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Took 11.90 seconds to spawn the instance on the hypervisor. [ 1593.229507] env[63379]: DEBUG nova.compute.manager [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1593.230433] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8553c3ee-d33d-41b9-a3ac-fddee82144e7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.319682] env[63379]: DEBUG oslo_vmware.api [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779530, 'name': Rename_Task, 'duration_secs': 0.154328} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1593.319977] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1593.322882] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ed9060f5-1539-4a0f-b897-2a118b015c5c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.331930] env[63379]: DEBUG oslo_vmware.api [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Waiting for the task: (returnval){ [ 1593.331930] env[63379]: value = "task-1779531" [ 1593.331930] env[63379]: _type = "Task" [ 1593.331930] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1593.343571] env[63379]: DEBUG oslo_vmware.api [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779531, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1593.355044] env[63379]: DEBUG oslo_vmware.api [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Task: {'id': task-1779529, 'name': ReconfigVM_Task, 'duration_secs': 0.708368} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1593.355877] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Reconfigured VM instance instance-0000003a to attach disk [datastore1] ac596f08-86a3-42e0-86e6-41a173fe868f/ac596f08-86a3-42e0-86e6-41a173fe868f.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1593.356360] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a724ae31-e856-41f6-8717-30cf4f7b9dc5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.369084] env[63379]: DEBUG oslo_vmware.api [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Waiting for the task: (returnval){ [ 1593.369084] env[63379]: value = "task-1779532" [ 1593.369084] env[63379]: _type = "Task" [ 1593.369084] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1593.382503] env[63379]: DEBUG oslo_vmware.api [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Task: {'id': task-1779532, 'name': Rename_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1593.481268] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9af9bcd-4c87-4eb7-9c5f-5b53e6d17e9d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.490547] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01b1bfe7-8724-426b-9f3a-da179ddf7842 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.526895] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-259b476f-1038-42f7-9064-5c2f57377241 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.537430] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de4ba66c-7b35-4f7a-8445-ed2ebe1864e6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.554626] env[63379]: DEBUG nova.compute.provider_tree [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1593.672117] env[63379]: DEBUG nova.compute.manager [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1593.675914] env[63379]: DEBUG nova.compute.manager [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1593.761918] env[63379]: INFO nova.compute.manager [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Took 39.60 seconds to build instance. [ 1593.763989] env[63379]: DEBUG nova.network.neutron [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1593.843680] env[63379]: DEBUG oslo_vmware.api [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779531, 'name': PowerOnVM_Task, 'duration_secs': 0.492414} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1593.847016] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1593.847016] env[63379]: INFO nova.compute.manager [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Took 9.79 seconds to spawn the instance on the hypervisor. [ 1593.847016] env[63379]: DEBUG nova.compute.manager [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1593.847016] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52de5b64-96f7-48a5-8956-133e14abbc71 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.881529] env[63379]: DEBUG oslo_vmware.api [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Task: {'id': task-1779532, 'name': Rename_Task, 'duration_secs': 0.195249} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1593.881815] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1593.882114] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-41049a3c-f70c-42a6-a986-58fef108ab0f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.891915] env[63379]: DEBUG oslo_vmware.api [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Waiting for the task: (returnval){ [ 1593.891915] env[63379]: value = "task-1779533" [ 1593.891915] env[63379]: _type = "Task" [ 1593.891915] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1593.903290] env[63379]: DEBUG oslo_vmware.api [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Task: {'id': task-1779533, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.061038] env[63379]: DEBUG nova.scheduler.client.report [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1594.089372] env[63379]: DEBUG nova.network.neutron [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Updating instance_info_cache with network_info: [{"id": "04dc0f41-2a3f-4db5-8cfd-7c1709ad3e82", "address": "fa:16:3e:6e:6e:f1", "network": {"id": "f43cdd88-dc3a-4cc6-af5d-da244f472d78", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-715557899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "928a9d102f0e45b897eae72fa566c0fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23fc30ea-1f06-424d-86e1-27ae5435b1a9", "external-id": "nsx-vlan-transportzone-189", "segmentation_id": 189, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap04dc0f41-2a", "ovs_interfaceid": "04dc0f41-2a3f-4db5-8cfd-7c1709ad3e82", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1594.209065] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1594.210495] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1594.265162] env[63379]: DEBUG oslo_concurrency.lockutils [None req-25ec036d-0ed9-41c5-bc26-f0d44dc2a084 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Lock "ec1f7a44-7344-43fb-9d51-688731d8ce14" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 75.274s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1594.368391] env[63379]: INFO nova.compute.manager [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Took 37.69 seconds to build instance. [ 1594.404411] env[63379]: DEBUG oslo_vmware.api [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Task: {'id': task-1779533, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.567630] env[63379]: DEBUG oslo_concurrency.lockutils [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.619s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1594.567962] env[63379]: INFO nova.compute.manager [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Migrating [ 1594.575351] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e2a8e779-188f-490a-b98a-05a17f960434 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.352s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1594.575683] env[63379]: DEBUG nova.objects.instance [None req-e2a8e779-188f-490a-b98a-05a17f960434 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Lazy-loading 'resources' on Instance uuid aa44a4ff-14e5-42d2-a082-06fe0ae9646c {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1594.592911] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Releasing lock "refresh_cache-607f9774-0ffc-4ece-a7ba-419fdf6eb26b" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1594.594075] env[63379]: DEBUG nova.compute.manager [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Instance network_info: |[{"id": "04dc0f41-2a3f-4db5-8cfd-7c1709ad3e82", "address": "fa:16:3e:6e:6e:f1", "network": {"id": "f43cdd88-dc3a-4cc6-af5d-da244f472d78", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-715557899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": 
{"injected": false, "tenant_id": "928a9d102f0e45b897eae72fa566c0fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23fc30ea-1f06-424d-86e1-27ae5435b1a9", "external-id": "nsx-vlan-transportzone-189", "segmentation_id": 189, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap04dc0f41-2a", "ovs_interfaceid": "04dc0f41-2a3f-4db5-8cfd-7c1709ad3e82", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1594.594075] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6e:6e:f1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '23fc30ea-1f06-424d-86e1-27ae5435b1a9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '04dc0f41-2a3f-4db5-8cfd-7c1709ad3e82', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1594.605959] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Creating folder: Project (928a9d102f0e45b897eae72fa566c0fe). Parent ref: group-v369214. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1594.607450] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9d31c3f8-9cb2-4553-8bd7-0297fcce9556 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.622507] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Created folder: Project (928a9d102f0e45b897eae72fa566c0fe) in parent group-v369214. [ 1594.622589] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Creating folder: Instances. Parent ref: group-v369382. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1594.622903] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8bac68fa-e35c-4311-84c2-3ef571de0c18 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.635710] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Created folder: Instances in parent group-v369382. [ 1594.635999] env[63379]: DEBUG oslo.service.loopingcall [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1594.636209] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1594.636431] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3143f47d-d0e5-4498-bcab-0b697230abc2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.661137] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1594.661137] env[63379]: value = "task-1779536" [ 1594.661137] env[63379]: _type = "Task" [ 1594.661137] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1594.668399] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779536, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.747401] env[63379]: DEBUG nova.compute.manager [req-44be1022-7d2b-48bc-84e2-d46490a0d600 req-03fb6d4f-739e-459d-a2fc-e47a34530338 service nova] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Received event network-changed-04dc0f41-2a3f-4db5-8cfd-7c1709ad3e82 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1594.747401] env[63379]: DEBUG nova.compute.manager [req-44be1022-7d2b-48bc-84e2-d46490a0d600 req-03fb6d4f-739e-459d-a2fc-e47a34530338 service nova] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Refreshing instance network info cache due to event network-changed-04dc0f41-2a3f-4db5-8cfd-7c1709ad3e82. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1594.747401] env[63379]: DEBUG oslo_concurrency.lockutils [req-44be1022-7d2b-48bc-84e2-d46490a0d600 req-03fb6d4f-739e-459d-a2fc-e47a34530338 service nova] Acquiring lock "refresh_cache-607f9774-0ffc-4ece-a7ba-419fdf6eb26b" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1594.747401] env[63379]: DEBUG oslo_concurrency.lockutils [req-44be1022-7d2b-48bc-84e2-d46490a0d600 req-03fb6d4f-739e-459d-a2fc-e47a34530338 service nova] Acquired lock "refresh_cache-607f9774-0ffc-4ece-a7ba-419fdf6eb26b" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1594.747401] env[63379]: DEBUG nova.network.neutron [req-44be1022-7d2b-48bc-84e2-d46490a0d600 req-03fb6d4f-739e-459d-a2fc-e47a34530338 service nova] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Refreshing network info cache for port 04dc0f41-2a3f-4db5-8cfd-7c1709ad3e82 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1594.874639] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3e7ae9c5-a7ec-40f6-be87-4b26d8ad3e54 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Lock "2a996f06-542e-4f71-95a4-0f71097d1478" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.097s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1594.910344] env[63379]: DEBUG oslo_vmware.api [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Task: {'id': task-1779533, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.089769] env[63379]: DEBUG oslo_concurrency.lockutils [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Acquiring lock "refresh_cache-f082cdd7-228e-4100-b301-5af6daea9b36" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1595.090727] env[63379]: DEBUG oslo_concurrency.lockutils [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Acquired lock "refresh_cache-f082cdd7-228e-4100-b301-5af6daea9b36" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1595.092031] env[63379]: DEBUG nova.network.neutron [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1595.196468] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779536, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.266373] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquiring lock "acc8aa2f-41a8-4f06-8227-a1bae9c93f44" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1595.268835] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Lock "acc8aa2f-41a8-4f06-8227-a1bae9c93f44" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.003s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1595.408093] env[63379]: DEBUG oslo_vmware.api [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Task: {'id': task-1779533, 'name': PowerOnVM_Task, 'duration_secs': 1.045223} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1595.408692] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1595.408911] env[63379]: INFO nova.compute.manager [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Took 6.31 seconds to spawn the instance on the hypervisor. [ 1595.409101] env[63379]: DEBUG nova.compute.manager [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1595.410033] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdc1ee88-20b7-446d-9de7-b41326a83dd9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.636730] env[63379]: DEBUG nova.network.neutron [req-44be1022-7d2b-48bc-84e2-d46490a0d600 req-03fb6d4f-739e-459d-a2fc-e47a34530338 service nova] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Updated VIF entry in instance network info cache for port 04dc0f41-2a3f-4db5-8cfd-7c1709ad3e82. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1595.636832] env[63379]: DEBUG nova.network.neutron [req-44be1022-7d2b-48bc-84e2-d46490a0d600 req-03fb6d4f-739e-459d-a2fc-e47a34530338 service nova] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Updating instance_info_cache with network_info: [{"id": "04dc0f41-2a3f-4db5-8cfd-7c1709ad3e82", "address": "fa:16:3e:6e:6e:f1", "network": {"id": "f43cdd88-dc3a-4cc6-af5d-da244f472d78", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-715557899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "928a9d102f0e45b897eae72fa566c0fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23fc30ea-1f06-424d-86e1-27ae5435b1a9", "external-id": "nsx-vlan-transportzone-189", "segmentation_id": 189, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap04dc0f41-2a", "ovs_interfaceid": "04dc0f41-2a3f-4db5-8cfd-7c1709ad3e82", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1595.671915] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779536, 'name': CreateVM_Task, 'duration_secs': 0.687048} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1595.673019] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1595.673787] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca039931-82ea-4403-b819-1e991d1c3731 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.676492] env[63379]: DEBUG oslo_concurrency.lockutils [None req-29923f7c-e8b5-411c-bd11-cf155895c40e tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Acquiring lock "158fe346-93f5-422b-877a-8423547da58f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1595.676729] env[63379]: DEBUG oslo_concurrency.lockutils [None req-29923f7c-e8b5-411c-bd11-cf155895c40e tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Lock "158fe346-93f5-422b-877a-8423547da58f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1595.676937] env[63379]: DEBUG oslo_concurrency.lockutils [None req-29923f7c-e8b5-411c-bd11-cf155895c40e tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Acquiring lock 
"158fe346-93f5-422b-877a-8423547da58f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1595.677154] env[63379]: DEBUG oslo_concurrency.lockutils [None req-29923f7c-e8b5-411c-bd11-cf155895c40e tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Lock "158fe346-93f5-422b-877a-8423547da58f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1595.677341] env[63379]: DEBUG oslo_concurrency.lockutils [None req-29923f7c-e8b5-411c-bd11-cf155895c40e tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Lock "158fe346-93f5-422b-877a-8423547da58f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1595.679171] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1595.679331] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1595.679652] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1595.680172] env[63379]: INFO nova.compute.manager [None req-29923f7c-e8b5-411c-bd11-cf155895c40e tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] [instance: 158fe346-93f5-422b-877a-8423547da58f] Terminating instance [ 1595.681520] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f586b1ed-43f8-43f6-bc72-eedeff4a06cd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.683910] env[63379]: DEBUG nova.compute.manager [None req-29923f7c-e8b5-411c-bd11-cf155895c40e tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] [instance: 158fe346-93f5-422b-877a-8423547da58f] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1595.684229] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-29923f7c-e8b5-411c-bd11-cf155895c40e tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] [instance: 158fe346-93f5-422b-877a-8423547da58f] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1595.685419] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2baad7fd-6528-4a23-8510-c0e910d0d15b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.692165] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53dac11b-31dc-4024-86cd-8c1a6983f60d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.699096] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-29923f7c-e8b5-411c-bd11-cf155895c40e tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] [instance: 158fe346-93f5-422b-877a-8423547da58f] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1595.699096] env[63379]: DEBUG oslo_vmware.api [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1595.699096] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5278eafc-c39c-e092-fe0d-339a15849848" [ 1595.699096] env[63379]: _type = "Task" [ 1595.699096] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1595.699477] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-60625109-8b49-4877-bc60-046c690506cd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.737481] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf01701b-faa8-4cc7-9f34-cefab7a42d46 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.744343] env[63379]: DEBUG oslo_vmware.api [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5278eafc-c39c-e092-fe0d-339a15849848, 'name': SearchDatastore_Task, 'duration_secs': 0.014267} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1595.747305] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1595.747305] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1595.747563] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1595.747622] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1595.747875] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1595.748233] env[63379]: DEBUG oslo_vmware.api [None req-29923f7c-e8b5-411c-bd11-cf155895c40e tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Waiting for the task: (returnval){ [ 1595.748233] env[63379]: value = "task-1779537" [ 1595.748233] env[63379]: _type = "Task" [ 1595.748233] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1595.750518] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8b398c52-acdf-4bb8-b939-9a8903041f24 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.760586] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53ba9c30-6de7-4e91-a6a9-581ce318380a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.773487] env[63379]: DEBUG nova.compute.manager [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Starting instance... 
{{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1595.776356] env[63379]: DEBUG oslo_vmware.api [None req-29923f7c-e8b5-411c-bd11-cf155895c40e tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Task: {'id': task-1779537, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.786805] env[63379]: DEBUG nova.compute.provider_tree [None req-e2a8e779-188f-490a-b98a-05a17f960434 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1595.790458] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1595.790622] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1595.792396] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3c74d5a-e94e-497a-ad7c-901ca98fde4f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.798367] env[63379]: DEBUG oslo_vmware.api [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1595.798367] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5252059d-bffd-c69e-fbc5-c447ecf6393f" [ 1595.798367] env[63379]: _type = "Task" [ 1595.798367] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1595.808339] env[63379]: DEBUG oslo_vmware.api [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5252059d-bffd-c69e-fbc5-c447ecf6393f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.941977] env[63379]: INFO nova.compute.manager [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Took 36.70 seconds to build instance. 
[ 1595.979941] env[63379]: DEBUG nova.network.neutron [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Updating instance_info_cache with network_info: [{"id": "bbe843e8-9156-454e-8ba4-dae6bc31c8b2", "address": "fa:16:3e:0c:14:52", "network": {"id": "55f3848c-4d4f-4c83-a3e6-bc7a6f7af3ce", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.215", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eb95d75934bc4912a35f709406a98a65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbbe843e8-91", "ovs_interfaceid": "bbe843e8-9156-454e-8ba4-dae6bc31c8b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1596.142155] env[63379]: DEBUG oslo_concurrency.lockutils [req-44be1022-7d2b-48bc-84e2-d46490a0d600 req-03fb6d4f-739e-459d-a2fc-e47a34530338 service nova] Releasing lock "refresh_cache-607f9774-0ffc-4ece-a7ba-419fdf6eb26b" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1596.263213] env[63379]: DEBUG oslo_vmware.api [None req-29923f7c-e8b5-411c-bd11-cf155895c40e tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Task: {'id': task-1779537, 'name': PowerOffVM_Task, 'duration_secs': 0.412119} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1596.263513] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-29923f7c-e8b5-411c-bd11-cf155895c40e tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] [instance: 158fe346-93f5-422b-877a-8423547da58f] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1596.263691] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-29923f7c-e8b5-411c-bd11-cf155895c40e tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] [instance: 158fe346-93f5-422b-877a-8423547da58f] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1596.263955] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4a55a289-e151-4a53-b2af-d30f069ac1b2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.293268] env[63379]: DEBUG nova.scheduler.client.report [None req-e2a8e779-188f-490a-b98a-05a17f960434 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1596.309306] env[63379]: DEBUG oslo_vmware.api [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5252059d-bffd-c69e-fbc5-c447ecf6393f, 'name': SearchDatastore_Task, 'duration_secs': 0.01157} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1596.312531] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0dabcac9-6867-4ccb-b64d-44786081b26d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.316908] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1596.319121] env[63379]: DEBUG oslo_vmware.api [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1596.319121] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52356e85-65cb-c3e4-045c-4168550ef35d" [ 1596.319121] env[63379]: _type = "Task" [ 1596.319121] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1596.328817] env[63379]: DEBUG oslo_vmware.api [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52356e85-65cb-c3e4-045c-4168550ef35d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.412419] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-29923f7c-e8b5-411c-bd11-cf155895c40e tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] [instance: 158fe346-93f5-422b-877a-8423547da58f] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1596.412419] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-29923f7c-e8b5-411c-bd11-cf155895c40e tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] [instance: 158fe346-93f5-422b-877a-8423547da58f] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1596.412419] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-29923f7c-e8b5-411c-bd11-cf155895c40e tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Deleting the datastore file [datastore1] 158fe346-93f5-422b-877a-8423547da58f {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1596.412419] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a1a92375-eff1-4b64-83d7-3f12d86e5e68 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.418773] env[63379]: DEBUG oslo_vmware.api [None req-29923f7c-e8b5-411c-bd11-cf155895c40e tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Waiting for the task: (returnval){ [ 1596.418773] env[63379]: value = "task-1779539" [ 1596.418773] env[63379]: _type = "Task" [ 1596.418773] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1596.429445] env[63379]: DEBUG oslo_vmware.api [None req-29923f7c-e8b5-411c-bd11-cf155895c40e tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Task: {'id': task-1779539, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.445389] env[63379]: DEBUG oslo_concurrency.lockutils [None req-34ee14e6-f586-4930-b9c5-2926ea7e6c7d tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Lock "ac596f08-86a3-42e0-86e6-41a173fe868f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.508s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1596.487133] env[63379]: DEBUG oslo_concurrency.lockutils [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Releasing lock "refresh_cache-f082cdd7-228e-4100-b301-5af6daea9b36" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1596.561368] env[63379]: INFO nova.compute.manager [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Rebuilding instance [ 1596.614143] env[63379]: DEBUG nova.compute.manager [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1596.615084] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f925aaf5-e704-42e7-83f1-f5f507d914d0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.800342] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e2a8e779-188f-490a-b98a-05a17f960434 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.225s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1596.802787] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.807s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1596.825237] env[63379]: INFO nova.scheduler.client.report [None req-e2a8e779-188f-490a-b98a-05a17f960434 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Deleted allocations for instance aa44a4ff-14e5-42d2-a082-06fe0ae9646c [ 1596.836167] env[63379]: DEBUG oslo_vmware.api [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52356e85-65cb-c3e4-045c-4168550ef35d, 'name': SearchDatastore_Task, 'duration_secs': 0.037618} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1596.836167] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1596.836167] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 607f9774-0ffc-4ece-a7ba-419fdf6eb26b/607f9774-0ffc-4ece-a7ba-419fdf6eb26b.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1596.836431] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1e6f87e8-93f6-43a6-939f-2b4e37fb9f36 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.848751] env[63379]: DEBUG oslo_vmware.api [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1596.848751] env[63379]: value = "task-1779540" [ 1596.848751] env[63379]: _type = "Task" [ 1596.848751] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1596.862819] env[63379]: DEBUG oslo_vmware.api [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779540, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.930305] env[63379]: DEBUG oslo_vmware.api [None req-29923f7c-e8b5-411c-bd11-cf155895c40e tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Task: {'id': task-1779539, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.192927} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1596.930600] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-29923f7c-e8b5-411c-bd11-cf155895c40e tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1596.930797] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-29923f7c-e8b5-411c-bd11-cf155895c40e tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] [instance: 158fe346-93f5-422b-877a-8423547da58f] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1596.931058] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-29923f7c-e8b5-411c-bd11-cf155895c40e tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] [instance: 158fe346-93f5-422b-877a-8423547da58f] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1596.931270] env[63379]: INFO nova.compute.manager [None req-29923f7c-e8b5-411c-bd11-cf155895c40e tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] [instance: 158fe346-93f5-422b-877a-8423547da58f] Took 1.25 seconds to destroy the instance on the hypervisor. [ 1596.931521] env[63379]: DEBUG oslo.service.loopingcall [None req-29923f7c-e8b5-411c-bd11-cf155895c40e tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1596.931715] env[63379]: DEBUG nova.compute.manager [-] [instance: 158fe346-93f5-422b-877a-8423547da58f] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1596.931811] env[63379]: DEBUG nova.network.neutron [-] [instance: 158fe346-93f5-422b-877a-8423547da58f] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1597.131138] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1597.131138] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1c4c5603-f5a2-43ea-9bf1-5655c4c8d268 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.140753] env[63379]: DEBUG oslo_vmware.api [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Waiting for the task: (returnval){ [ 1597.140753] env[63379]: value = "task-1779541" [ 1597.140753] env[63379]: _type = "Task" [ 1597.140753] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1597.160477] env[63379]: DEBUG oslo_vmware.api [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Task: {'id': task-1779541, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.212746] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Acquiring lock "b91a5b89-0456-431d-b099-adda3a6b3024" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1597.215020] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Lock "b91a5b89-0456-431d-b099-adda3a6b3024" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1597.342718] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e2a8e779-188f-490a-b98a-05a17f960434 tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Lock "aa44a4ff-14e5-42d2-a082-06fe0ae9646c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.727s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1597.364709] env[63379]: DEBUG oslo_vmware.api [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779540, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.484239] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a341da9-ba54-4c8b-994d-a3fdeac7c499 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.494731] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe7990c2-10a5-4264-821b-80403ff2222c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.532335] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67610e55-5d9f-48e8-9c2b-7066389d0502 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.541338] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29499bfc-073a-4b08-a77d-22131820d497 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.556532] env[63379]: DEBUG nova.compute.provider_tree [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1597.589691] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1fa4712c-5ce0-4a58-997b-6de8bc603b4e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Acquiring lock "ee36cc5f-61a1-4e4f-9cae-670f5868d90c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1597.590064] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1fa4712c-5ce0-4a58-997b-6de8bc603b4e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Lock "ee36cc5f-61a1-4e4f-9cae-670f5868d90c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1597.590454] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1fa4712c-5ce0-4a58-997b-6de8bc603b4e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Acquiring lock "ee36cc5f-61a1-4e4f-9cae-670f5868d90c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1597.590673] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1fa4712c-5ce0-4a58-997b-6de8bc603b4e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Lock 
"ee36cc5f-61a1-4e4f-9cae-670f5868d90c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1597.590851] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1fa4712c-5ce0-4a58-997b-6de8bc603b4e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Lock "ee36cc5f-61a1-4e4f-9cae-670f5868d90c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1597.594052] env[63379]: INFO nova.compute.manager [None req-1fa4712c-5ce0-4a58-997b-6de8bc603b4e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Terminating instance [ 1597.596923] env[63379]: DEBUG nova.compute.manager [None req-1fa4712c-5ce0-4a58-997b-6de8bc603b4e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1597.596923] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-1fa4712c-5ce0-4a58-997b-6de8bc603b4e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1597.597694] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0c3fcf5-7c86-4964-be97-f61dcfc00e72 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.609262] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fa4712c-5ce0-4a58-997b-6de8bc603b4e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1597.609262] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a5d6db44-a495-4d68-942d-497924441a59 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.616711] env[63379]: DEBUG oslo_vmware.api [None req-1fa4712c-5ce0-4a58-997b-6de8bc603b4e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Waiting for the task: (returnval){ [ 1597.616711] env[63379]: value = "task-1779542" [ 1597.616711] env[63379]: _type = "Task" [ 1597.616711] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1597.619139] env[63379]: DEBUG nova.compute.manager [req-7463d682-2335-4cb8-94a2-a3c74102217c req-8cb0824b-3a6a-411e-a176-574f1cf1915a service nova] [instance: 158fe346-93f5-422b-877a-8423547da58f] Received event network-vif-deleted-ce8dbca6-e4fa-47a3-b501-18973a50219c {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1597.619342] env[63379]: INFO nova.compute.manager [req-7463d682-2335-4cb8-94a2-a3c74102217c req-8cb0824b-3a6a-411e-a176-574f1cf1915a service nova] [instance: 158fe346-93f5-422b-877a-8423547da58f] Neutron deleted interface ce8dbca6-e4fa-47a3-b501-18973a50219c; detaching it from the instance and deleting it from the info cache [ 1597.619451] env[63379]: DEBUG nova.network.neutron [req-7463d682-2335-4cb8-94a2-a3c74102217c req-8cb0824b-3a6a-411e-a176-574f1cf1915a service nova] [instance: 158fe346-93f5-422b-877a-8423547da58f] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1597.632571] env[63379]: DEBUG oslo_vmware.api [None req-1fa4712c-5ce0-4a58-997b-6de8bc603b4e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779542, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.649290] env[63379]: DEBUG nova.compute.manager [None req-ce69cd3b-5bb8-42f0-ba62-e30ba5011587 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1597.652960] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78e7e274-a817-465b-a6f8-2590c43f1fcc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.656059] env[63379]: DEBUG oslo_vmware.api [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Task: {'id': task-1779541, 'name': PowerOffVM_Task} progress is 100%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.717526] env[63379]: DEBUG nova.compute.manager [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1597.862184] env[63379]: DEBUG oslo_vmware.api [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779540, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.763391} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1597.862510] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 607f9774-0ffc-4ece-a7ba-419fdf6eb26b/607f9774-0ffc-4ece-a7ba-419fdf6eb26b.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1597.862852] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1597.863174] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8ab4bd98-c4b2-48ba-a3f6-568996b6515a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.875733] env[63379]: DEBUG oslo_vmware.api [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1597.875733] env[63379]: value = "task-1779543" [ 1597.875733] env[63379]: _type = "Task" [ 1597.875733] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1597.897520] env[63379]: DEBUG oslo_vmware.api [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779543, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.005974] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4af93a48-1ee6-46ce-a173-3b76bf45a512 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.026752] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Updating instance 'f082cdd7-228e-4100-b301-5af6daea9b36' progress to 0 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1598.085730] env[63379]: ERROR nova.scheduler.client.report [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [req-063a1b32-20e5-4021-84a2-6c9d528b41ff] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID cf478c89-515f-4372-b90f-4868ab56e978. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-063a1b32-20e5-4021-84a2-6c9d528b41ff"}]}: nova.exception.BuildAbortException: Build of instance a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6 aborted: Failed to rebuild volume backed instance. [ 1598.096100] env[63379]: DEBUG nova.network.neutron [-] [instance: 158fe346-93f5-422b-877a-8423547da58f] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1598.102121] env[63379]: DEBUG nova.scheduler.client.report [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Refreshing inventories for resource provider cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1598.123108] env[63379]: DEBUG nova.scheduler.client.report [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Updating ProviderTree inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1598.123322] env[63379]: DEBUG nova.compute.provider_tree [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1598.127407] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-14937c60-fafc-40af-ab47-16feec5adf2c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.132490] env[63379]: DEBUG oslo_vmware.api [None req-1fa4712c-5ce0-4a58-997b-6de8bc603b4e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779542, 'name': PowerOffVM_Task, 'duration_secs': 0.402791} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1598.133520] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fa4712c-5ce0-4a58-997b-6de8bc603b4e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1598.133698] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-1fa4712c-5ce0-4a58-997b-6de8bc603b4e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1598.133968] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cb2107d1-4af2-4f51-b93c-efb48578f8cd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.137861] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eb45cd7-cd1c-48cc-b03d-0efe723dbbac {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.148488] env[63379]: DEBUG nova.scheduler.client.report [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Refreshing aggregate associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, aggregates: None {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1598.161241] env[63379]: DEBUG oslo_vmware.api [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Task: {'id': task-1779541, 'name': PowerOffVM_Task, 'duration_secs': 0.558202} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1598.161491] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1598.161704] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1598.162457] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7692673-6e94-4c29-856f-0c12b1e88b97 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.165715] env[63379]: INFO nova.compute.manager [None req-ce69cd3b-5bb8-42f0-ba62-e30ba5011587 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] instance snapshotting [ 1598.181062] env[63379]: DEBUG nova.compute.manager [req-7463d682-2335-4cb8-94a2-a3c74102217c req-8cb0824b-3a6a-411e-a176-574f1cf1915a service nova] [instance: 158fe346-93f5-422b-877a-8423547da58f] Detach interface failed, port_id=ce8dbca6-e4fa-47a3-b501-18973a50219c, reason: Instance 158fe346-93f5-422b-877a-8423547da58f could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 1598.182601] env[63379]: DEBUG nova.scheduler.client.report [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Refreshing trait associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1598.185253] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3223790a-a672-45a5-bbf9-e2d07bb45446 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.190608] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1598.191072] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f5a1f349-9e03-4b0e-8679-a29e7935e0c3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.207329] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5bc2586-c576-48af-be47-9ed69e2d21ee {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.219432] env[63379]: DEBUG nova.virt.vmwareapi.vmops 
[None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1598.219653] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1598.219899] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Deleting the datastore file [datastore1] ac596f08-86a3-42e0-86e6-41a173fe868f {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1598.220385] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-92e2a62c-a2f1-406a-a8be-d01643226d71 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.228611] env[63379]: DEBUG oslo_vmware.api [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Waiting for the task: (returnval){ [ 1598.228611] env[63379]: value = "task-1779546" [ 1598.228611] env[63379]: _type = "Task" [ 1598.228611] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.239332] env[63379]: DEBUG oslo_vmware.api [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Task: {'id': task-1779546, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.246774] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1598.388093] env[63379]: DEBUG oslo_vmware.api [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779543, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.517259] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-1fa4712c-5ce0-4a58-997b-6de8bc603b4e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1598.517487] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-1fa4712c-5ce0-4a58-997b-6de8bc603b4e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1598.517665] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fa4712c-5ce0-4a58-997b-6de8bc603b4e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Deleting the datastore file [datastore1] ee36cc5f-61a1-4e4f-9cae-670f5868d90c {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1598.518028] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b3ddc780-5a66-4f4e-9033-2e42862439c9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.523768] env[63379]: DEBUG oslo_vmware.api [None req-1fa4712c-5ce0-4a58-997b-6de8bc603b4e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Waiting for the task: (returnval){ [ 1598.523768] env[63379]: value = "task-1779547" [ 1598.523768] env[63379]: _type = "Task" [ 1598.523768] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.535335] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1598.535617] env[63379]: DEBUG oslo_vmware.api [None req-1fa4712c-5ce0-4a58-997b-6de8bc603b4e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779547, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.535856] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9ca26bd0-24d7-4f59-a4b2-8613b667a0c6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.541725] env[63379]: DEBUG oslo_vmware.api [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Waiting for the task: (returnval){ [ 1598.541725] env[63379]: value = "task-1779548" [ 1598.541725] env[63379]: _type = "Task" [ 1598.541725] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.550492] env[63379]: DEBUG oslo_vmware.api [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779548, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.599016] env[63379]: INFO nova.compute.manager [-] [instance: 158fe346-93f5-422b-877a-8423547da58f] Took 1.67 seconds to deallocate network for instance. [ 1598.599950] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1c6b1b1-ec32-4ac8-86d2-4a09ec4122e1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.610537] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6de87648-284c-4c18-b3e8-acd8d316ffe9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.643280] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bbba8ff-6753-47b4-a211-ad966b96ad41 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.652254] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c87b5f6-cca3-4461-8514-17dfb0905ffc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.667178] env[63379]: DEBUG nova.compute.provider_tree [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1598.722404] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-ce69cd3b-5bb8-42f0-ba62-e30ba5011587 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Creating Snapshot of the VM instance {{(pid=63379) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1598.722726] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-050f8ebe-c958-4bb3-8c4f-0bb6c5f29644 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.731432] env[63379]: DEBUG oslo_vmware.api [None req-ce69cd3b-5bb8-42f0-ba62-e30ba5011587 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Waiting for the task: (returnval){ [ 1598.731432] env[63379]: value = "task-1779549" [ 1598.731432] env[63379]: _type = "Task" [ 1598.731432] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.743985] env[63379]: DEBUG oslo_vmware.api [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Task: {'id': task-1779546, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.43085} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1598.747074] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1598.747307] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1598.747505] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1598.750113] env[63379]: DEBUG oslo_vmware.api [None req-ce69cd3b-5bb8-42f0-ba62-e30ba5011587 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779549, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.889856] env[63379]: DEBUG oslo_vmware.api [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779543, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.033925] env[63379]: DEBUG oslo_vmware.api [None req-1fa4712c-5ce0-4a58-997b-6de8bc603b4e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Task: {'id': task-1779547, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.367748} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1599.034231] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fa4712c-5ce0-4a58-997b-6de8bc603b4e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1599.034432] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-1fa4712c-5ce0-4a58-997b-6de8bc603b4e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1599.034625] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-1fa4712c-5ce0-4a58-997b-6de8bc603b4e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1599.034816] env[63379]: INFO nova.compute.manager [None req-1fa4712c-5ce0-4a58-997b-6de8bc603b4e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Took 1.44 seconds to destroy the instance on the hypervisor. [ 1599.035143] env[63379]: DEBUG oslo.service.loopingcall [None req-1fa4712c-5ce0-4a58-997b-6de8bc603b4e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1599.035360] env[63379]: DEBUG nova.compute.manager [-] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1599.035457] env[63379]: DEBUG nova.network.neutron [-] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1599.052327] env[63379]: DEBUG oslo_vmware.api [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779548, 'name': PowerOffVM_Task, 'duration_secs': 0.28431} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1599.052630] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1599.052823] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Updating instance 'f082cdd7-228e-4100-b301-5af6daea9b36' progress to 17 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1599.111286] env[63379]: DEBUG oslo_concurrency.lockutils [None req-29923f7c-e8b5-411c-bd11-cf155895c40e tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1599.170019] env[63379]: DEBUG nova.scheduler.client.report [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1599.247139] env[63379]: DEBUG oslo_vmware.api [None req-ce69cd3b-5bb8-42f0-ba62-e30ba5011587 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779549, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.389422] env[63379]: DEBUG oslo_vmware.api [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779543, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.560101] env[63379]: DEBUG nova.virt.hardware [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1599.560101] env[63379]: DEBUG nova.virt.hardware [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1599.560101] env[63379]: DEBUG nova.virt.hardware [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1599.560101] env[63379]: DEBUG nova.virt.hardware [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1599.560101] env[63379]: DEBUG nova.virt.hardware [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1599.560101] env[63379]: DEBUG nova.virt.hardware [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1599.560450] env[63379]: DEBUG nova.virt.hardware [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1599.560839] env[63379]: DEBUG nova.virt.hardware [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1599.561509] env[63379]: DEBUG nova.virt.hardware [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Got 1 possible topologies {{(pid=63379) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1599.561868] env[63379]: DEBUG nova.virt.hardware [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1599.565020] env[63379]: DEBUG nova.virt.hardware [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1599.568303] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a7c16eb1-7e64-4334-9245-da15f79f9daa {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.586945] env[63379]: DEBUG oslo_vmware.api [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Waiting for the task: (returnval){ [ 1599.586945] env[63379]: value = "task-1779550" [ 1599.586945] env[63379]: _type = "Task" [ 1599.586945] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1599.595219] env[63379]: DEBUG oslo_vmware.api [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779550, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.652135] env[63379]: DEBUG nova.compute.manager [req-2d49f0af-ed90-4c6b-93e9-807c10f41f90 req-e6db48a4-9f04-495e-8339-5489a9a312cd service nova] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Received event network-vif-deleted-4b39f7fe-6ef6-4804-b4b1-102adc940d55 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1599.652135] env[63379]: INFO nova.compute.manager [req-2d49f0af-ed90-4c6b-93e9-807c10f41f90 req-e6db48a4-9f04-495e-8339-5489a9a312cd service nova] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Neutron deleted interface 4b39f7fe-6ef6-4804-b4b1-102adc940d55; detaching it from the instance and deleting it from the info cache [ 1599.652135] env[63379]: DEBUG nova.network.neutron [req-2d49f0af-ed90-4c6b-93e9-807c10f41f90 req-e6db48a4-9f04-495e-8339-5489a9a312cd service nova] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1599.675936] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.873s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1599.676121] env[63379]: INFO nova.compute.manager [None req-6f62d783-9ae0-412a-a3c5-f68d3c9ceb45 tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] [instance: 
a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Successfully reverted task state from rebuilding on failure for instance. [ 1599.681282] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3d772cdd-d35b-4aae-919a-d57f0bbd6820 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.319s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1599.681513] env[63379]: DEBUG nova.objects.instance [None req-3d772cdd-d35b-4aae-919a-d57f0bbd6820 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Lazy-loading 'resources' on Instance uuid 915aec20-5765-4aad-8b0f-f2d71b7d6428 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1599.744102] env[63379]: DEBUG oslo_vmware.api [None req-ce69cd3b-5bb8-42f0-ba62-e30ba5011587 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779549, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.784607] env[63379]: DEBUG nova.virt.hardware [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1599.784858] env[63379]: DEBUG nova.virt.hardware [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1599.785208] env[63379]: DEBUG nova.virt.hardware [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1599.785530] env[63379]: DEBUG nova.virt.hardware [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1599.785730] env[63379]: DEBUG nova.virt.hardware [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1599.785892] env[63379]: DEBUG nova.virt.hardware [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1599.786137] env[63379]: DEBUG nova.virt.hardware [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1599.786318] env[63379]: DEBUG nova.virt.hardware [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1599.786520] env[63379]: DEBUG nova.virt.hardware [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1599.786672] env[63379]: DEBUG nova.virt.hardware [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1599.786855] env[63379]: DEBUG nova.virt.hardware [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1599.788241] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe5198cf-cd01-4452-bb09-56705cd07126 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.796880] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1e9983c-70cc-4d75-bd06-752c32ba89e3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.810513] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Instance VIF info [] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1599.816376] env[63379]: DEBUG oslo.service.loopingcall [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
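The nova.virt.hardware entries above compute a CPU topology for a 1-vCPU flavor: neither the flavor nor the image sets limits or preferences (0:0:0), the maximums default to 65536, and the only possible layout is sockets=1, cores=1, threads=1. A minimal standalone sketch of that selection step, illustrative only (the function names are hypothetical, not the nova.virt.hardware implementation):

```python
# Illustrative sketch only -- not the actual nova.virt.hardware code.
# Enumerates (sockets, cores, threads) layouts for a vCPU count and picks one,
# mirroring the "1 vcpu -> 1:1:1" result in the log entries above.
from itertools import product

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Return every (sockets, cores, threads) triple whose product equals vcpus."""
    found = []
    for sockets, cores, threads in product(range(1, vcpus + 1), repeat=3):
        if sockets * cores * threads != vcpus:
            continue
        if sockets <= max_sockets and cores <= max_cores and threads <= max_threads:
            found.append((sockets, cores, threads))
    return found

def choose_topology(vcpus, preferred=(0, 0, 0)):
    """Prefer layouts matching the non-zero parts of the preference, else take the first."""
    candidates = possible_topologies(vcpus)
    def score(topo):
        return sum(1 for want, got in zip(preferred, topo) if want and want != got)
    return sorted(candidates, key=score)[0]

print(choose_topology(1))   # (1, 1, 1), matching "Sorted desired topologies" above
```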
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1599.816616] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1599.816827] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f2aac6cd-11dd-43b1-a6c4-d3001f8219f6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.833393] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1599.833393] env[63379]: value = "task-1779551" [ 1599.833393] env[63379]: _type = "Task" [ 1599.833393] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1599.841051] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779551, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.890693] env[63379]: DEBUG oslo_vmware.api [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779543, 'name': ExtendVirtualDisk_Task, 'duration_secs': 1.604776} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1599.891127] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1599.892395] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23f66eb5-f0b3-4170-81f2-557f71de9e0c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.920480] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Reconfiguring VM instance instance-0000003b to attach disk [datastore1] 607f9774-0ffc-4ece-a7ba-419fdf6eb26b/607f9774-0ffc-4ece-a7ba-419fdf6eb26b.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1599.921158] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3d6342a4-3d5e-4c95-afca-ff3ed50b7d73 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.942042] env[63379]: DEBUG oslo_vmware.api [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1599.942042] env[63379]: value = "task-1779552" [ 1599.942042] env[63379]: _type = "Task" [ 1599.942042] env[63379]: } to complete. 
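The wait_for_task/_poll_task entries above repeat a progress percentage until a task finishes. A standalone polling loop in the same spirit, assuming a caller-supplied get_task_info() callable (hypothetical; this is not the oslo.vmware implementation):

```python
# Illustrative sketch only -- not oslo.vmware's wait_for_task.
# Polls a task until it finishes, the way the "_poll_task ... progress is N%"
# entries above repeat until "completed successfully" is logged.
import time

class TaskTimedOut(Exception):
    pass

def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
    """get_task_info() is assumed to return a dict like
    {'state': 'running'|'success'|'error', 'progress': int, 'error': str|None}."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            raise RuntimeError(info.get('error') or 'task failed')
        print(f"progress is {info.get('progress', 0)}%")
        time.sleep(poll_interval)
    raise TaskTimedOut('task did not complete in time')

# Tiny usage example with a fake task that finishes on the third poll.
states = iter([{'state': 'running', 'progress': 0},
               {'state': 'running', 'progress': 66},
               {'state': 'success', 'progress': 100}])
wait_for_task(lambda: next(states), poll_interval=0.01)
```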
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1599.954263] env[63379]: DEBUG oslo_vmware.api [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779552, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.041329] env[63379]: DEBUG nova.network.neutron [-] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1600.096549] env[63379]: DEBUG oslo_vmware.api [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779550, 'name': ReconfigVM_Task, 'duration_secs': 0.169558} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1600.096870] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Updating instance 'f082cdd7-228e-4100-b301-5af6daea9b36' progress to 33 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1600.155121] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d8e674da-f517-40db-8f92-3f10a60fe814 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.165792] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39b5de4c-c0a7-4334-af08-20ff26a2be87 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.204256] env[63379]: DEBUG nova.compute.manager [req-2d49f0af-ed90-4c6b-93e9-807c10f41f90 req-e6db48a4-9f04-495e-8339-5489a9a312cd service nova] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Detach interface failed, port_id=4b39f7fe-6ef6-4804-b4b1-102adc940d55, reason: Instance ee36cc5f-61a1-4e4f-9cae-670f5868d90c could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 1600.245018] env[63379]: DEBUG oslo_vmware.api [None req-ce69cd3b-5bb8-42f0-ba62-e30ba5011587 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779549, 'name': CreateSnapshot_Task, 'duration_secs': 1.18902} completed successfully. 
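The network-vif-deleted handling above drops port 4b39f7fe-6ef6-4804-b4b1-102adc940d55 from the instance info cache and then tolerates the detach failing because instance ee36cc5f-61a1-4e4f-9cae-670f5868d90c is already gone. A sketch of that shape, with hypothetical helpers standing in for the real compute-manager code:

```python
# Illustrative sketch only -- not nova.compute.manager's external-event handler.
# Drop the deleted port from the cached network info, then try to detach the
# interface and tolerate the instance already being gone.

class InstanceNotFound(Exception):
    pass

def handle_vif_deleted(instance, port_id, info_cache, detach_interface):
    """info_cache: dict mapping instance uuid -> list of port dicts.
    detach_interface: callable(instance, port_id), may raise InstanceNotFound."""
    cached = info_cache.get(instance, [])
    info_cache[instance] = [vif for vif in cached if vif.get('id') != port_id]
    try:
        detach_interface(instance, port_id)
    except InstanceNotFound:
        # Matches the "Detach interface failed ... could not be found" entry above:
        # the guest was already deleted, so there is nothing left to detach.
        print(f"Detach interface failed, port_id={port_id}: instance {instance} not found")
```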
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1600.248047] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-ce69cd3b-5bb8-42f0-ba62-e30ba5011587 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Created Snapshot of the VM instance {{(pid=63379) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1600.249317] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf66ebcd-1453-4066-b022-feae7494236b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.343034] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779551, 'name': CreateVM_Task, 'duration_secs': 0.265872} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1600.345154] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1600.345792] env[63379]: DEBUG oslo_concurrency.lockutils [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1600.345959] env[63379]: DEBUG oslo_concurrency.lockutils [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1600.346291] env[63379]: DEBUG oslo_concurrency.lockutils [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1600.346591] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5051f31a-d741-468f-bb87-e4851e0f0d5a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.350978] env[63379]: DEBUG oslo_vmware.api [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Waiting for the task: (returnval){ [ 1600.350978] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]528aa843-07f8-dc08-bf7f-2461020871fa" [ 1600.350978] env[63379]: _type = "Task" [ 1600.350978] env[63379]: } to complete. 
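The lock names above serialize work on one cached image by locking the image's datastore path ("[datastore1] devstack-image-cache_base/<image id>"). A standalone per-name lock registry illustrating the idea (process-local only; not oslo.concurrency's lockutils):

```python
# Illustrative sketch only -- a per-name lock registry, process-local.
import threading
from contextlib import contextmanager

_locks = {}
_registry_guard = threading.Lock()

@contextmanager
def named_lock(name):
    """Acquire a lock identified by an arbitrary string, creating it on first use."""
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    print(f'Acquiring lock "{name}"')
    with lock:
        print(f'Acquired lock "{name}"')
        yield
    print(f'Released lock "{name}"')

with named_lock("[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48"):
    pass  # check whether the cached VMDK exists, fetch it if not, etc.
```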
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1600.362504] env[63379]: DEBUG oslo_vmware.api [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]528aa843-07f8-dc08-bf7f-2461020871fa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.454743] env[63379]: DEBUG oslo_vmware.api [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779552, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.544099] env[63379]: INFO nova.compute.manager [-] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Took 1.51 seconds to deallocate network for instance. [ 1600.558866] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cebc65b8-9a53-47ed-87ff-760e33e49478 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.567032] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2615088b-18a5-4f03-8bc8-e2bdb0f31fe4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.598560] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f338821c-d5bd-47a0-aeaf-f7aedd0dbf4a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.603241] env[63379]: DEBUG nova.virt.hardware [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:30:29Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='fd63d07f-2af7-4c40-ac44-c2f8123389ab',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-418275153',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1600.603582] env[63379]: DEBUG nova.virt.hardware [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1600.603673] env[63379]: DEBUG nova.virt.hardware [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1600.603880] env[63379]: DEBUG nova.virt.hardware [None req-32126942-421f-439d-abbb-2316d7b472b3 
tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1600.604640] env[63379]: DEBUG nova.virt.hardware [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1600.604834] env[63379]: DEBUG nova.virt.hardware [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1600.607476] env[63379]: DEBUG nova.virt.hardware [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1600.607730] env[63379]: DEBUG nova.virt.hardware [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1600.607963] env[63379]: DEBUG nova.virt.hardware [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1600.608180] env[63379]: DEBUG nova.virt.hardware [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1600.608371] env[63379]: DEBUG nova.virt.hardware [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1600.613848] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Reconfiguring VM instance instance-00000033 to detach disk 2000 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1600.614473] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8f31999b-07a6-4078-aadf-b37ce4452098 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.634138] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6433f960-6eaa-42df-be42-ac87a90424d9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1600.638154] env[63379]: DEBUG oslo_vmware.api [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Waiting for the task: (returnval){ [ 1600.638154] env[63379]: value = "task-1779553" [ 1600.638154] env[63379]: _type = "Task" [ 1600.638154] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1600.650311] env[63379]: DEBUG nova.compute.provider_tree [None req-3d772cdd-d35b-4aae-919a-d57f0bbd6820 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1600.659273] env[63379]: DEBUG oslo_vmware.api [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779553, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.769568] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-ce69cd3b-5bb8-42f0-ba62-e30ba5011587 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Creating linked-clone VM from snapshot {{(pid=63379) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1600.769906] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-8f0268cf-c8d8-496e-8f94-642d95d9c413 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.779797] env[63379]: DEBUG oslo_vmware.api [None req-ce69cd3b-5bb8-42f0-ba62-e30ba5011587 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Waiting for the task: (returnval){ [ 1600.779797] env[63379]: value = "task-1779554" [ 1600.779797] env[63379]: _type = "Task" [ 1600.779797] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1600.788022] env[63379]: DEBUG oslo_vmware.api [None req-ce69cd3b-5bb8-42f0-ba62-e30ba5011587 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779554, 'name': CloneVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.864701] env[63379]: DEBUG oslo_vmware.api [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]528aa843-07f8-dc08-bf7f-2461020871fa, 'name': SearchDatastore_Task, 'duration_secs': 0.019991} completed successfully. 
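The snapshot/clone activity above (CreateSnapshot_Task, then a linked-clone CloneVM_Task destined for image upload) follows a capture-and-clean-up sequence that the later Destroy_Task and RemoveSnapshot_Task entries complete. A schematic sketch with hypothetical callables standing in for the driver tasks:

```python
# Illustrative sketch only -- a schematic of the snapshot-based image capture seen in
# the log (CreateSnapshot_Task -> CloneVM_Task -> upload -> Destroy_Task ->
# RemoveSnapshot_Task). All callables are hypothetical stand-ins.

def capture_image(vm, image_id, *, create_snapshot, clone_from_snapshot,
                  upload_vm, destroy_vm, remove_snapshot):
    snapshot = create_snapshot(vm)                      # CreateSnapshot_Task
    try:
        clone = clone_from_snapshot(vm, snapshot)       # CloneVM_Task (linked clone)
        try:
            upload_vm(clone, image_id)                  # stream-optimized upload
        finally:
            destroy_vm(clone)                           # Destroy_Task on the temporary clone
    finally:
        remove_snapshot(vm, snapshot)                   # RemoveSnapshot_Task on the source VM
```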
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1600.865068] env[63379]: DEBUG oslo_concurrency.lockutils [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1600.865315] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1600.865603] env[63379]: DEBUG oslo_concurrency.lockutils [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1600.865765] env[63379]: DEBUG oslo_concurrency.lockutils [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1600.865946] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1600.866266] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3b2deb7c-f277-49fb-ab47-294feeca440a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.889401] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1600.889645] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Folder [datastore1] devstack-image-cache_base created. 
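Before using the image cache, the entries above create the devstack-image-cache_base folder and then search the datastore for the cached VMDK. The same check-create-fetch pattern against a local filesystem, as an illustration only (fetch_image is a hypothetical downloader):

```python
# Illustrative sketch only -- "create the cache folder if missing, then look for the
# cached disk", expressed against a local filesystem instead of a vSphere datastore.
from pathlib import Path

def ensure_cached_image(cache_root, image_id, fetch_image):
    """Return the path of the cached image, downloading it on a cache miss.
    fetch_image(image_id, destination_path) is a hypothetical downloader."""
    cache_dir = Path(cache_root) / "devstack-image-cache_base"
    cache_dir.mkdir(parents=True, exist_ok=True)   # MakeDirectory is likewise tolerated if present
    cached = cache_dir / image_id / f"{image_id}.vmdk"
    if not cached.exists():                        # SearchDatastore_Task equivalent
        cached.parent.mkdir(exist_ok=True)
        fetch_image(image_id, cached)
    return cached
```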
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1600.890570] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d988215-8b95-441a-b517-1988f0562cd5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.896390] env[63379]: DEBUG oslo_vmware.api [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Waiting for the task: (returnval){ [ 1600.896390] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]524cba7c-7125-17e0-64bc-e99985954320" [ 1600.896390] env[63379]: _type = "Task" [ 1600.896390] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1600.904819] env[63379]: DEBUG oslo_vmware.api [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]524cba7c-7125-17e0-64bc-e99985954320, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.919855] env[63379]: DEBUG oslo_concurrency.lockutils [None req-01877a7d-de59-4ff2-b0b1-bca629d6551a tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Acquiring lock "41952d7b-ce23-4e9b-8843-bbac1d3099c1" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1600.920127] env[63379]: DEBUG oslo_concurrency.lockutils [None req-01877a7d-de59-4ff2-b0b1-bca629d6551a tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Lock "41952d7b-ce23-4e9b-8843-bbac1d3099c1" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1600.953493] env[63379]: DEBUG oslo_vmware.api [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779552, 'name': ReconfigVM_Task, 'duration_secs': 0.675715} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1600.953788] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Reconfigured VM instance instance-0000003b to attach disk [datastore1] 607f9774-0ffc-4ece-a7ba-419fdf6eb26b/607f9774-0ffc-4ece-a7ba-419fdf6eb26b.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1600.954785] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b174cd32-59f0-4566-b580-92194bf90022 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.961741] env[63379]: DEBUG oslo_vmware.api [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1600.961741] env[63379]: value = "task-1779555" [ 1600.961741] env[63379]: _type = "Task" [ 1600.961741] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1600.970290] env[63379]: DEBUG oslo_vmware.api [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779555, 'name': Rename_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.052862] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1fa4712c-5ce0-4a58-997b-6de8bc603b4e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1601.148933] env[63379]: DEBUG oslo_vmware.api [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779553, 'name': ReconfigVM_Task, 'duration_secs': 0.482824} completed successfully. 
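For instance 607f9774 the log proceeds from attaching the copied root disk (ReconfigVM_Task) to Rename_Task and, further below, PowerOnVM_Task. A schematic of that ordering with hypothetical stand-ins for the task calls:

```python
# Illustrative sketch only -- the tail of the spawn sequence visible in the log for
# instance 607f9774. The callables are hypothetical stand-ins for the driver tasks.

def finish_spawn(vm, disk_path, display_name, *, attach_disk, rename_vm, power_on):
    attach_disk(vm, disk_path, disk_type="sparse")   # ReconfigVM_Task (attach root disk)
    rename_vm(vm, display_name)                      # Rename_Task
    power_on(vm)                                     # PowerOnVM_Task
```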
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1601.149231] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Reconfigured VM instance instance-00000033 to detach disk 2000 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1601.150063] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd740784-4e53-457a-aaf2-7b26d0fd254e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.153214] env[63379]: DEBUG nova.scheduler.client.report [None req-3d772cdd-d35b-4aae-919a-d57f0bbd6820 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1601.178668] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] f082cdd7-228e-4100-b301-5af6daea9b36/f082cdd7-228e-4100-b301-5af6daea9b36.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1601.179663] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1049acf5-6515-4c06-85a8-920d40318520 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.200521] env[63379]: DEBUG oslo_vmware.api [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Waiting for the task: (returnval){ [ 1601.200521] env[63379]: value = "task-1779556" [ 1601.200521] env[63379]: _type = "Task" [ 1601.200521] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1601.210951] env[63379]: DEBUG oslo_vmware.api [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779556, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.291412] env[63379]: DEBUG oslo_vmware.api [None req-ce69cd3b-5bb8-42f0-ba62-e30ba5011587 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779554, 'name': CloneVM_Task} progress is 94%. 
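The report-client entry above skips a placement update because the provider's inventory is unchanged; the dict below mirrors the logged inventory for provider cf478c89-515f-4372-b90f-4868ab56e978. A standalone sketch of the comparison (push_update is a hypothetical placement call):

```python
# Illustrative sketch only -- skip the placement update when inventory is unchanged.

reported = {
    'VCPU':      {'total': 48,     'reserved': 0,   'min_unit': 1, 'max_unit': 16,
                  'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530,
                  'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'min_unit': 1, 'max_unit': 162,
                  'step_size': 1, 'allocation_ratio': 1.0},
}

def maybe_update_inventory(provider_id, current, new, push_update):
    """push_update(provider_id, inventory) is a hypothetical placement client call."""
    if current == new:
        print(f"Inventory has not changed for provider {provider_id}")
        return False
    push_update(provider_id, new)
    return True

maybe_update_inventory("cf478c89-515f-4372-b90f-4868ab56e978", reported, dict(reported),
                       push_update=lambda provider, inv: None)
```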
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.408598] env[63379]: DEBUG oslo_vmware.api [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]524cba7c-7125-17e0-64bc-e99985954320, 'name': SearchDatastore_Task, 'duration_secs': 0.009609} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1601.409492] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46cfd388-8cce-4637-8a5f-3a49f2f00792 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.415152] env[63379]: DEBUG oslo_vmware.api [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Waiting for the task: (returnval){ [ 1601.415152] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5236fcc0-50db-9d5d-c114-4c448849e65e" [ 1601.415152] env[63379]: _type = "Task" [ 1601.415152] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1601.424233] env[63379]: DEBUG nova.compute.utils [None req-01877a7d-de59-4ff2-b0b1-bca629d6551a tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1601.425487] env[63379]: DEBUG oslo_vmware.api [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5236fcc0-50db-9d5d-c114-4c448849e65e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.475108] env[63379]: DEBUG oslo_vmware.api [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779555, 'name': Rename_Task, 'duration_secs': 0.168075} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1601.475611] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1601.475904] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4149dba2-fe75-499f-8e7d-4dd17926c205 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.482787] env[63379]: DEBUG oslo_vmware.api [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1601.482787] env[63379]: value = "task-1779557" [ 1601.482787] env[63379]: _type = "Task" [ 1601.482787] env[63379]: } to complete. 
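The "Using /dev/sd instead of None" entry above picks a device name for the volume being attached, which ends up as /dev/sdb further below. A simplified stand-in for that selection (the real nova.compute.utils.get_next_device_name handles more cases):

```python
# Illustrative sketch only -- pick the next free /dev/sd<letter> device name.
import string

def next_device_name(in_use, prefix="/dev/sd"):
    """Return the first /dev/sd<letter> not present in in_use."""
    for letter in string.ascii_lowercase:
        candidate = prefix + letter
        if candidate not in in_use:
            return candidate
    raise ValueError("no free device names left")

print(next_device_name({"/dev/sda"}))   # -> /dev/sdb, as chosen for volume 3c7b78db below
```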
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1601.490603] env[63379]: DEBUG oslo_vmware.api [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779557, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.658920] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3d772cdd-d35b-4aae-919a-d57f0bbd6820 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.978s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1601.661235] env[63379]: DEBUG oslo_concurrency.lockutils [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.600s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1601.662755] env[63379]: INFO nova.compute.claims [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1601.677272] env[63379]: INFO nova.scheduler.client.report [None req-3d772cdd-d35b-4aae-919a-d57f0bbd6820 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Deleted allocations for instance 915aec20-5765-4aad-8b0f-f2d71b7d6428 [ 1601.710573] env[63379]: DEBUG oslo_vmware.api [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779556, 'name': ReconfigVM_Task, 'duration_secs': 0.311323} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1601.710976] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Reconfigured VM instance instance-00000033 to attach disk [datastore1] f082cdd7-228e-4100-b301-5af6daea9b36/f082cdd7-228e-4100-b301-5af6daea9b36.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1601.711396] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Updating instance 'f082cdd7-228e-4100-b301-5af6daea9b36' progress to 50 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1601.791553] env[63379]: DEBUG oslo_vmware.api [None req-ce69cd3b-5bb8-42f0-ba62-e30ba5011587 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779554, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.926656] env[63379]: DEBUG oslo_concurrency.lockutils [None req-01877a7d-de59-4ff2-b0b1-bca629d6551a tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Lock "41952d7b-ce23-4e9b-8843-bbac1d3099c1" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1601.927230] env[63379]: DEBUG oslo_vmware.api [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5236fcc0-50db-9d5d-c114-4c448849e65e, 'name': SearchDatastore_Task, 'duration_secs': 0.011138} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1601.927523] env[63379]: DEBUG oslo_concurrency.lockutils [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1601.927778] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] ac596f08-86a3-42e0-86e6-41a173fe868f/ac596f08-86a3-42e0-86e6-41a173fe868f.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1601.928734] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5bd98e76-8b97-4d9e-a4e1-ca6df557c7c5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.935446] env[63379]: DEBUG oslo_vmware.api [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Waiting for the task: (returnval){ [ 1601.935446] env[63379]: value = "task-1779558" [ 1601.935446] env[63379]: _type = "Task" [ 1601.935446] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1601.943581] env[63379]: DEBUG oslo_vmware.api [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Task: {'id': task-1779558, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.993915] env[63379]: DEBUG oslo_vmware.api [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779557, 'name': PowerOnVM_Task} progress is 66%. 
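The CopyVirtualDisk_Task above copies the cached image VMDK into a folder named after the new instance's UUID. A tiny helper showing how those "[datastore1] folder/file.vmdk" strings are composed (illustrative only; not the real ds_util code):

```python
# Illustrative sketch only -- compose the source and destination paths of the
# CopyVirtualDisk_Task seen above.

def datastore_path(datastore, *parts):
    """Build a vSphere-style datastore path like '[datastore1] uuid/uuid.vmdk'."""
    return f"[{datastore}] " + "/".join(parts)

image_id = "d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48"
instance_uuid = "ac596f08-86a3-42e0-86e6-41a173fe868f"

source = datastore_path("datastore1", "devstack-image-cache_base", image_id, f"{image_id}.vmdk")
dest = datastore_path("datastore1", instance_uuid, f"{instance_uuid}.vmdk")
print(source)
print(dest)
```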
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.184661] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3d772cdd-d35b-4aae-919a-d57f0bbd6820 tempest-FloatingIPsAssociationTestJSON-1881138532 tempest-FloatingIPsAssociationTestJSON-1881138532-project-member] Lock "915aec20-5765-4aad-8b0f-f2d71b7d6428" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.094s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1602.218415] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65ce0d00-3be6-4c1c-9327-d349a03ecb29 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.243924] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfe78e7c-4870-4741-a54e-6a5a52b9c3be {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.264912] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Updating instance 'f082cdd7-228e-4100-b301-5af6daea9b36' progress to 67 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1602.291287] env[63379]: DEBUG oslo_vmware.api [None req-ce69cd3b-5bb8-42f0-ba62-e30ba5011587 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779554, 'name': CloneVM_Task, 'duration_secs': 1.334268} completed successfully. 
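The migration progress values above (33, then 50, then 67 for instance f082cdd7) are consistent with reporting completed steps out of a fixed total, rounded to a percentage; six total steps is an inference from the numbers, not something the log confirms. A sketch of that bookkeeping:

```python
# Illustrative sketch only -- progress as rounded "completed steps / total steps".

def progress_percent(step, total_steps):
    return round(step / total_steps * 100)

for step in (2, 3, 4):
    print(f"progress to {progress_percent(step, 6)}")   # 33, 50, 67
```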
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1602.291601] env[63379]: INFO nova.virt.vmwareapi.vmops [None req-ce69cd3b-5bb8-42f0-ba62-e30ba5011587 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Created linked-clone VM from snapshot [ 1602.292415] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26a0f0fa-343f-4054-aa05-16eb23919ef5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.300443] env[63379]: DEBUG nova.virt.vmwareapi.images [None req-ce69cd3b-5bb8-42f0-ba62-e30ba5011587 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Uploading image b44bda3f-4960-466b-9abc-871f5bd53818 {{(pid=63379) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1602.316616] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce69cd3b-5bb8-42f0-ba62-e30ba5011587 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Destroying the VM {{(pid=63379) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1602.316909] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-a2f6f02d-beea-425f-a6c9-326225efa64a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.325034] env[63379]: DEBUG oslo_vmware.api [None req-ce69cd3b-5bb8-42f0-ba62-e30ba5011587 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Waiting for the task: (returnval){ [ 1602.325034] env[63379]: value = "task-1779559" [ 1602.325034] env[63379]: _type = "Task" [ 1602.325034] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1602.336019] env[63379]: DEBUG oslo_vmware.api [None req-ce69cd3b-5bb8-42f0-ba62-e30ba5011587 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779559, 'name': Destroy_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.445953] env[63379]: DEBUG oslo_vmware.api [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Task: {'id': task-1779558, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.492821] env[63379]: DEBUG oslo_vmware.api [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779557, 'name': PowerOnVM_Task, 'duration_secs': 0.787822} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1602.494169] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1602.494416] env[63379]: INFO nova.compute.manager [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Took 10.72 seconds to spawn the instance on the hypervisor. [ 1602.494579] env[63379]: DEBUG nova.compute.manager [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1602.497702] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a71d6d33-12dc-4066-854c-050e1cbc4cd1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.836170] env[63379]: DEBUG oslo_vmware.api [None req-ce69cd3b-5bb8-42f0-ba62-e30ba5011587 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779559, 'name': Destroy_Task} progress is 33%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.837074] env[63379]: DEBUG nova.network.neutron [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Port bbe843e8-9156-454e-8ba4-dae6bc31c8b2 binding to destination host cpu-1 is already ACTIVE {{(pid=63379) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1602.946987] env[63379]: DEBUG oslo_vmware.api [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Task: {'id': task-1779558, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.525454} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1602.949733] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] ac596f08-86a3-42e0-86e6-41a173fe868f/ac596f08-86a3-42e0-86e6-41a173fe868f.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1602.949974] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1602.950425] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-978e3682-920d-4d45-8903-bac07112c731 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.957489] env[63379]: DEBUG oslo_vmware.api [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Waiting for the task: (returnval){ [ 1602.957489] env[63379]: value = "task-1779560" [ 1602.957489] env[63379]: _type = "Task" [ 1602.957489] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1602.968432] env[63379]: DEBUG oslo_vmware.api [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Task: {'id': task-1779560, 'name': ExtendVirtualDisk_Task} progress is 0%. 
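The "Extending root virtual disk to 1048576" entry above is the flavor's 1 GiB root disk expressed in KiB. The conversion, as a one-line sketch:

```python
# Illustrative sketch only -- root_gb from the flavor converted to KiB for the extend call.

def root_disk_size_kb(root_gb):
    return root_gb * 1024 * 1024

print(root_disk_size_kb(1))   # 1048576, the value passed to ExtendVirtualDisk_Task above
```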
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.003203] env[63379]: DEBUG oslo_concurrency.lockutils [None req-01877a7d-de59-4ff2-b0b1-bca629d6551a tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Acquiring lock "41952d7b-ce23-4e9b-8843-bbac1d3099c1" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1603.003518] env[63379]: DEBUG oslo_concurrency.lockutils [None req-01877a7d-de59-4ff2-b0b1-bca629d6551a tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Lock "41952d7b-ce23-4e9b-8843-bbac1d3099c1" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1603.003779] env[63379]: INFO nova.compute.manager [None req-01877a7d-de59-4ff2-b0b1-bca629d6551a tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Attaching volume 3c7b78db-230f-4c84-98a6-2e17f07510bc to /dev/sdb [ 1603.020851] env[63379]: INFO nova.compute.manager [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Took 41.71 seconds to build instance. [ 1603.064479] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24ecfceb-f50b-4321-976f-41d33996b713 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.074845] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ff916a5-1d0b-4c50-95a6-63d450a2b962 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.090101] env[63379]: DEBUG nova.virt.block_device [None req-01877a7d-de59-4ff2-b0b1-bca629d6551a tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Updating existing volume attachment record: a10d4b15-726a-495b-b776-a9391ad8c032 {{(pid=63379) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1603.113913] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bfa447a-ddf5-4811-bddc-8c4416f52453 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.121482] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2a93dfd-2df6-48e4-a4a7-1527af047f57 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.156405] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d84da149-a23a-4813-8917-7d7387e6fc81 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.164311] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1383e1b0-ba6c-4dc8-8829-11e6e13e39de 
{{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.178394] env[63379]: DEBUG nova.compute.provider_tree [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1603.335788] env[63379]: DEBUG oslo_vmware.api [None req-ce69cd3b-5bb8-42f0-ba62-e30ba5011587 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779559, 'name': Destroy_Task, 'duration_secs': 0.974322} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1603.336089] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-ce69cd3b-5bb8-42f0-ba62-e30ba5011587 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Destroyed the VM [ 1603.336321] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-ce69cd3b-5bb8-42f0-ba62-e30ba5011587 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Deleting Snapshot of the VM instance {{(pid=63379) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1603.336638] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-360dbf4b-b149-4801-bd44-6670727fb7c2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.345821] env[63379]: DEBUG oslo_vmware.api [None req-ce69cd3b-5bb8-42f0-ba62-e30ba5011587 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Waiting for the task: (returnval){ [ 1603.345821] env[63379]: value = "task-1779562" [ 1603.345821] env[63379]: _type = "Task" [ 1603.345821] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.350277] env[63379]: DEBUG oslo_concurrency.lockutils [None req-03b56432-c718-41b0-8304-95164de4e295 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "607f9774-0ffc-4ece-a7ba-419fdf6eb26b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1603.356737] env[63379]: DEBUG oslo_vmware.api [None req-ce69cd3b-5bb8-42f0-ba62-e30ba5011587 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779562, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.467111] env[63379]: DEBUG oslo_vmware.api [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Task: {'id': task-1779560, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065137} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1603.467423] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1603.468219] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a37aad8-37e5-411e-b48a-1f3d2ddba113 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.488832] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] ac596f08-86a3-42e0-86e6-41a173fe868f/ac596f08-86a3-42e0-86e6-41a173fe868f.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1603.489152] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-65d5fb78-04d2-4a3e-bb06-9452ee3d02ba {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.508643] env[63379]: DEBUG oslo_vmware.api [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Waiting for the task: (returnval){ [ 1603.508643] env[63379]: value = "task-1779565" [ 1603.508643] env[63379]: _type = "Task" [ 1603.508643] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.520422] env[63379]: DEBUG oslo_vmware.api [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Task: {'id': task-1779565, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.523461] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a1cb7923-d9e9-47aa-baa0-825df13e3b23 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "607f9774-0ffc-4ece-a7ba-419fdf6eb26b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.192s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1603.523723] env[63379]: DEBUG oslo_concurrency.lockutils [None req-03b56432-c718-41b0-8304-95164de4e295 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "607f9774-0ffc-4ece-a7ba-419fdf6eb26b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.174s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1603.523944] env[63379]: DEBUG oslo_concurrency.lockutils [None req-03b56432-c718-41b0-8304-95164de4e295 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "607f9774-0ffc-4ece-a7ba-419fdf6eb26b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1603.524188] env[63379]: DEBUG oslo_concurrency.lockutils [None req-03b56432-c718-41b0-8304-95164de4e295 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "607f9774-0ffc-4ece-a7ba-419fdf6eb26b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1603.524384] env[63379]: DEBUG oslo_concurrency.lockutils [None req-03b56432-c718-41b0-8304-95164de4e295 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "607f9774-0ffc-4ece-a7ba-419fdf6eb26b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1603.526826] env[63379]: INFO nova.compute.manager [None req-03b56432-c718-41b0-8304-95164de4e295 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Terminating instance [ 1603.528450] env[63379]: DEBUG nova.compute.manager [None req-03b56432-c718-41b0-8304-95164de4e295 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1603.528650] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-03b56432-c718-41b0-8304-95164de4e295 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1603.529463] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac4bded7-8e6d-4562-b06a-9f0f0bf9bf1f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.540687] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-03b56432-c718-41b0-8304-95164de4e295 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1603.541012] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d5dfcb30-9185-4531-a62d-efff14982420 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.549247] env[63379]: DEBUG oslo_vmware.api [None req-03b56432-c718-41b0-8304-95164de4e295 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1603.549247] env[63379]: value = "task-1779566" [ 1603.549247] env[63379]: _type = "Task" [ 1603.549247] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.559605] env[63379]: DEBUG oslo_vmware.api [None req-03b56432-c718-41b0-8304-95164de4e295 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779566, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.683408] env[63379]: DEBUG nova.scheduler.client.report [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1603.867522] env[63379]: DEBUG oslo_concurrency.lockutils [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Acquiring lock "f082cdd7-228e-4100-b301-5af6daea9b36-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1603.870708] env[63379]: DEBUG oslo_concurrency.lockutils [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Lock "f082cdd7-228e-4100-b301-5af6daea9b36-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1603.870708] env[63379]: DEBUG oslo_concurrency.lockutils [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Lock "f082cdd7-228e-4100-b301-5af6daea9b36-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1603.877418] env[63379]: DEBUG oslo_vmware.api [None req-ce69cd3b-5bb8-42f0-ba62-e30ba5011587 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779562, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.024065] env[63379]: DEBUG oslo_vmware.api [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Task: {'id': task-1779565, 'name': ReconfigVM_Task, 'duration_secs': 0.341933} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1604.024174] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Reconfigured VM instance instance-0000003a to attach disk [datastore1] ac596f08-86a3-42e0-86e6-41a173fe868f/ac596f08-86a3-42e0-86e6-41a173fe868f.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1604.024998] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0b6a01df-a9b0-4a17-9fb6-37e06b0bedfb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.032709] env[63379]: DEBUG oslo_vmware.api [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Waiting for the task: (returnval){ [ 1604.032709] env[63379]: value = "task-1779567" [ 1604.032709] env[63379]: _type = "Task" [ 1604.032709] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1604.043925] env[63379]: DEBUG oslo_vmware.api [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Task: {'id': task-1779567, 'name': Rename_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.061999] env[63379]: DEBUG oslo_vmware.api [None req-03b56432-c718-41b0-8304-95164de4e295 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779566, 'name': PowerOffVM_Task, 'duration_secs': 0.191404} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1604.062206] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-03b56432-c718-41b0-8304-95164de4e295 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1604.062479] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-03b56432-c718-41b0-8304-95164de4e295 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1604.062856] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a152e95a-b529-443a-abb5-a941876593af {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.191410] env[63379]: DEBUG oslo_concurrency.lockutils [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.530s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1604.192245] env[63379]: DEBUG nova.compute.manager [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1604.198365] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ef7f56e7-6b54-4e9f-b59f-290b32f364eb tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.113s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1604.198717] env[63379]: DEBUG nova.objects.instance [None req-ef7f56e7-6b54-4e9f-b59f-290b32f364eb tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Lazy-loading 'resources' on Instance uuid a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1604.200440] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-03b56432-c718-41b0-8304-95164de4e295 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1604.200763] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-03b56432-c718-41b0-8304-95164de4e295 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1604.201083] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-03b56432-c718-41b0-8304-95164de4e295 tempest-DeleteServersTestJSON-2143897756 
tempest-DeleteServersTestJSON-2143897756-project-member] Deleting the datastore file [datastore1] 607f9774-0ffc-4ece-a7ba-419fdf6eb26b {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1604.202857] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e7f7ffd7-ffa5-4195-9823-6d2bbf23d3f3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.214719] env[63379]: DEBUG oslo_vmware.api [None req-03b56432-c718-41b0-8304-95164de4e295 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1604.214719] env[63379]: value = "task-1779569" [ 1604.214719] env[63379]: _type = "Task" [ 1604.214719] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1604.224243] env[63379]: DEBUG oslo_vmware.api [None req-03b56432-c718-41b0-8304-95164de4e295 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779569, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.357512] env[63379]: DEBUG oslo_vmware.api [None req-ce69cd3b-5bb8-42f0-ba62-e30ba5011587 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779562, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.543715] env[63379]: DEBUG oslo_vmware.api [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Task: {'id': task-1779567, 'name': Rename_Task, 'duration_secs': 0.159302} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1604.544138] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1604.544463] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e6bcbb2e-fe4b-4eda-8526-63e804c72a88 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.552409] env[63379]: DEBUG oslo_vmware.api [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Waiting for the task: (returnval){ [ 1604.552409] env[63379]: value = "task-1779570" [ 1604.552409] env[63379]: _type = "Task" [ 1604.552409] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1604.560475] env[63379]: DEBUG oslo_vmware.api [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Task: {'id': task-1779570, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.706627] env[63379]: DEBUG nova.compute.utils [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1604.707924] env[63379]: DEBUG nova.compute.manager [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1604.708119] env[63379]: DEBUG nova.network.neutron [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1604.726712] env[63379]: DEBUG oslo_vmware.api [None req-03b56432-c718-41b0-8304-95164de4e295 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779569, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.425646} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1604.727087] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-03b56432-c718-41b0-8304-95164de4e295 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1604.727404] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-03b56432-c718-41b0-8304-95164de4e295 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1604.727839] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-03b56432-c718-41b0-8304-95164de4e295 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1604.727839] env[63379]: INFO nova.compute.manager [None req-03b56432-c718-41b0-8304-95164de4e295 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1604.728129] env[63379]: DEBUG oslo.service.loopingcall [None req-03b56432-c718-41b0-8304-95164de4e295 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1604.728570] env[63379]: DEBUG nova.compute.manager [-] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1604.728724] env[63379]: DEBUG nova.network.neutron [-] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1604.770461] env[63379]: DEBUG nova.policy [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9162483675d540dfb8551206627b50e7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '767980ba969142098ccbdf031f6e62a9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1604.868700] env[63379]: DEBUG oslo_vmware.api [None req-ce69cd3b-5bb8-42f0-ba62-e30ba5011587 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779562, 'name': RemoveSnapshot_Task, 'duration_secs': 1.056104} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1604.869424] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-ce69cd3b-5bb8-42f0-ba62-e30ba5011587 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Deleted Snapshot of the VM instance {{(pid=63379) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1604.929935] env[63379]: DEBUG oslo_concurrency.lockutils [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Acquiring lock "refresh_cache-f082cdd7-228e-4100-b301-5af6daea9b36" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1604.930164] env[63379]: DEBUG oslo_concurrency.lockutils [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Acquired lock "refresh_cache-f082cdd7-228e-4100-b301-5af6daea9b36" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1604.930429] env[63379]: DEBUG nova.network.neutron [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1605.067161] env[63379]: DEBUG oslo_vmware.api [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Task: {'id': task-1779570, 'name': PowerOnVM_Task, 'duration_secs': 0.434867} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1605.067161] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1605.067161] env[63379]: DEBUG nova.compute.manager [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1605.067161] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6651f16e-49a2-4a61-8a29-fca388d21969 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.211745] env[63379]: DEBUG nova.compute.manager [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1605.215899] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-101771c2-1487-4c8d-9114-23c7595d2ccb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.225048] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e4ae484-a3cc-439a-9577-7b6d381d8963 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.263888] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e230ec2-59cf-4f9c-a056-d78e7446d6d6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.267537] env[63379]: DEBUG nova.compute.manager [req-d02e95a8-18fc-42fc-ba68-760c47a7b957 req-bb657dd2-73e3-40ea-b29b-710530eeea27 service nova] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Received event network-vif-deleted-04dc0f41-2a3f-4db5-8cfd-7c1709ad3e82 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1605.267754] env[63379]: INFO nova.compute.manager [req-d02e95a8-18fc-42fc-ba68-760c47a7b957 req-bb657dd2-73e3-40ea-b29b-710530eeea27 service nova] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Neutron deleted interface 04dc0f41-2a3f-4db5-8cfd-7c1709ad3e82; detaching it from the instance and deleting it from the info cache [ 1605.267906] env[63379]: DEBUG nova.network.neutron [req-d02e95a8-18fc-42fc-ba68-760c47a7b957 req-bb657dd2-73e3-40ea-b29b-710530eeea27 service nova] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1605.270248] env[63379]: DEBUG nova.network.neutron [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Successfully created port: 
93c003c1-3952-4c3f-ac43-f471addf4090 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1605.278869] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-469c5e10-eff0-4925-a6d7-2a71678af471 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.293256] env[63379]: DEBUG nova.compute.provider_tree [None req-ef7f56e7-6b54-4e9f-b59f-290b32f364eb tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1605.376681] env[63379]: WARNING nova.compute.manager [None req-ce69cd3b-5bb8-42f0-ba62-e30ba5011587 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Image not found during snapshot: nova.exception.ImageNotFound: Image b44bda3f-4960-466b-9abc-871f5bd53818 could not be found. [ 1605.599037] env[63379]: DEBUG oslo_concurrency.lockutils [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1605.685889] env[63379]: DEBUG nova.network.neutron [-] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1605.771011] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-151a6b1f-0f49-4a88-9d39-2511becb7e36 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.775166] env[63379]: DEBUG nova.network.neutron [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Updating instance_info_cache with network_info: [{"id": "bbe843e8-9156-454e-8ba4-dae6bc31c8b2", "address": "fa:16:3e:0c:14:52", "network": {"id": "55f3848c-4d4f-4c83-a3e6-bc7a6f7af3ce", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.215", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eb95d75934bc4912a35f709406a98a65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbbe843e8-91", "ovs_interfaceid": "bbe843e8-9156-454e-8ba4-dae6bc31c8b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1605.784539] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e316c108-4c74-4c91-853b-ff911e9515c5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.799357] env[63379]: DEBUG nova.scheduler.client.report [None req-ef7f56e7-6b54-4e9f-b59f-290b32f364eb tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1605.833456] env[63379]: DEBUG nova.compute.manager [req-d02e95a8-18fc-42fc-ba68-760c47a7b957 req-bb657dd2-73e3-40ea-b29b-710530eeea27 service nova] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Detach interface failed, port_id=04dc0f41-2a3f-4db5-8cfd-7c1709ad3e82, reason: Instance 607f9774-0ffc-4ece-a7ba-419fdf6eb26b could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 1605.840608] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fd53a111-d3e7-4980-8551-aefbe6955814 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Acquiring lock "2a996f06-542e-4f71-95a4-0f71097d1478" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1605.840931] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fd53a111-d3e7-4980-8551-aefbe6955814 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Lock "2a996f06-542e-4f71-95a4-0f71097d1478" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1605.841166] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fd53a111-d3e7-4980-8551-aefbe6955814 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Acquiring lock "2a996f06-542e-4f71-95a4-0f71097d1478-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1605.841355] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fd53a111-d3e7-4980-8551-aefbe6955814 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Lock "2a996f06-542e-4f71-95a4-0f71097d1478-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1605.841528] env[63379]: DEBUG oslo_concurrency.lockutils [None 
req-fd53a111-d3e7-4980-8551-aefbe6955814 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Lock "2a996f06-542e-4f71-95a4-0f71097d1478-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1605.843853] env[63379]: INFO nova.compute.manager [None req-fd53a111-d3e7-4980-8551-aefbe6955814 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Terminating instance [ 1605.845960] env[63379]: DEBUG nova.compute.manager [None req-fd53a111-d3e7-4980-8551-aefbe6955814 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1605.846478] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-fd53a111-d3e7-4980-8551-aefbe6955814 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1605.847331] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a59549f-2df1-42fb-9b8b-f4ad32d24aba {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.855114] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd53a111-d3e7-4980-8551-aefbe6955814 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1605.855361] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f2011c2e-73c9-419d-823f-0c62fbbe4fee {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.861588] env[63379]: DEBUG oslo_vmware.api [None req-fd53a111-d3e7-4980-8551-aefbe6955814 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Waiting for the task: (returnval){ [ 1605.861588] env[63379]: value = "task-1779572" [ 1605.861588] env[63379]: _type = "Task" [ 1605.861588] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1605.869680] env[63379]: DEBUG oslo_vmware.api [None req-fd53a111-d3e7-4980-8551-aefbe6955814 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779572, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.180170] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c11d0bc1-2313-40aa-bfc2-c315ed7e2fcc tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Acquiring lock "ac596f08-86a3-42e0-86e6-41a173fe868f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1606.180454] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c11d0bc1-2313-40aa-bfc2-c315ed7e2fcc tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Lock "ac596f08-86a3-42e0-86e6-41a173fe868f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1606.180667] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c11d0bc1-2313-40aa-bfc2-c315ed7e2fcc tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Acquiring lock "ac596f08-86a3-42e0-86e6-41a173fe868f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1606.180856] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c11d0bc1-2313-40aa-bfc2-c315ed7e2fcc tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Lock "ac596f08-86a3-42e0-86e6-41a173fe868f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1606.181051] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c11d0bc1-2313-40aa-bfc2-c315ed7e2fcc tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Lock "ac596f08-86a3-42e0-86e6-41a173fe868f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1606.183356] env[63379]: INFO nova.compute.manager [None req-c11d0bc1-2313-40aa-bfc2-c315ed7e2fcc tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Terminating instance [ 1606.185121] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c11d0bc1-2313-40aa-bfc2-c315ed7e2fcc tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Acquiring lock "refresh_cache-ac596f08-86a3-42e0-86e6-41a173fe868f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1606.185349] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c11d0bc1-2313-40aa-bfc2-c315ed7e2fcc tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Acquired lock "refresh_cache-ac596f08-86a3-42e0-86e6-41a173fe868f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1606.185798] env[63379]: DEBUG nova.network.neutron [None req-c11d0bc1-2313-40aa-bfc2-c315ed7e2fcc tempest-ServerShowV254Test-462289757 
tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1606.191200] env[63379]: INFO nova.compute.manager [-] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Took 1.46 seconds to deallocate network for instance. [ 1606.226798] env[63379]: DEBUG nova.compute.manager [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1606.260081] env[63379]: DEBUG nova.virt.hardware [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='d6ae078deef25c47c0c1fad23c60d344',container_format='bare',created_at=2024-12-11T23:31:38Z,direct_url=,disk_format='vmdk',id=c9c544a4-5a35-4c31-896a-05c58c561419,min_disk=1,min_ram=0,name='tempest-test-snap-1093530973',owner='767980ba969142098ccbdf031f6e62a9',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2024-12-11T23:31:57Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1606.260081] env[63379]: DEBUG nova.virt.hardware [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1606.260226] env[63379]: DEBUG nova.virt.hardware [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1606.260377] env[63379]: DEBUG nova.virt.hardware [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1606.260530] env[63379]: DEBUG nova.virt.hardware [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1606.260686] env[63379]: DEBUG nova.virt.hardware [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1606.260903] env[63379]: DEBUG nova.virt.hardware [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1606.261574] env[63379]: DEBUG nova.virt.hardware [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1606.261812] env[63379]: DEBUG nova.virt.hardware [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1606.262045] env[63379]: DEBUG nova.virt.hardware [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1606.262316] env[63379]: DEBUG nova.virt.hardware [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1606.263870] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-633ff70d-ccca-4724-9bb4-22c0ca086c7e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.272976] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e470732-437a-4c9f-8974-2cac3c1e00e0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.278677] env[63379]: DEBUG oslo_concurrency.lockutils [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Releasing lock "refresh_cache-f082cdd7-228e-4100-b301-5af6daea9b36" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1606.305323] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ef7f56e7-6b54-4e9f-b59f-290b32f364eb tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.107s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1606.308406] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a13046a8-d32e-49a7-a044-02ed2cd3a9cd tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.551s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1606.308709] env[63379]: DEBUG nova.objects.instance [None req-a13046a8-d32e-49a7-a044-02ed2cd3a9cd tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Lazy-loading 'resources' on Instance uuid 
d47be684-6cd8-45c6-8c6a-9a6db0390f97 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1606.373701] env[63379]: DEBUG oslo_vmware.api [None req-fd53a111-d3e7-4980-8551-aefbe6955814 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779572, 'name': PowerOffVM_Task, 'duration_secs': 0.201357} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1606.374062] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd53a111-d3e7-4980-8551-aefbe6955814 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1606.374792] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-fd53a111-d3e7-4980-8551-aefbe6955814 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1606.374847] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7b1eea84-0c6c-4225-837c-28fe15eab2ae {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.463506] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-fd53a111-d3e7-4980-8551-aefbe6955814 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1606.463843] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-fd53a111-d3e7-4980-8551-aefbe6955814 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1606.464027] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd53a111-d3e7-4980-8551-aefbe6955814 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Deleting the datastore file [datastore1] 2a996f06-542e-4f71-95a4-0f71097d1478 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1606.464325] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e1b9fcf7-d7ec-40e6-9ce0-15b00b91c14a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.472899] env[63379]: DEBUG oslo_vmware.api [None req-fd53a111-d3e7-4980-8551-aefbe6955814 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Waiting for the task: (returnval){ [ 1606.472899] env[63379]: value = "task-1779574" [ 1606.472899] env[63379]: _type = "Task" [ 1606.472899] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1606.480589] env[63379]: DEBUG oslo_vmware.api [None req-fd53a111-d3e7-4980-8551-aefbe6955814 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779574, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.698346] env[63379]: DEBUG oslo_concurrency.lockutils [None req-03b56432-c718-41b0-8304-95164de4e295 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1606.741546] env[63379]: DEBUG nova.network.neutron [None req-c11d0bc1-2313-40aa-bfc2-c315ed7e2fcc tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1606.828178] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c7f6a3c-b819-4016-95e3-e4e60ed02f97 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.832025] env[63379]: DEBUG nova.network.neutron [None req-c11d0bc1-2313-40aa-bfc2-c315ed7e2fcc tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1606.854071] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c11d0bc1-2313-40aa-bfc2-c315ed7e2fcc tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Releasing lock "refresh_cache-ac596f08-86a3-42e0-86e6-41a173fe868f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1606.854646] env[63379]: DEBUG nova.compute.manager [None req-c11d0bc1-2313-40aa-bfc2-c315ed7e2fcc tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1606.854991] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c11d0bc1-2313-40aa-bfc2-c315ed7e2fcc tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1606.855950] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c143218d-e987-4a86-b026-12ed49c51ed2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.862022] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cedf563-34d3-46c5-a6cf-bbc12bc8a170 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.652716] env[63379]: DEBUG nova.network.neutron [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Successfully updated port: 93c003c1-3952-4c3f-ac43-f471addf4090 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1607.656959] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-01877a7d-de59-4ff2-b0b1-bca629d6551a tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Volume attach. Driver type: vmdk {{(pid=63379) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1607.657449] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-01877a7d-de59-4ff2-b0b1-bca629d6551a tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369389', 'volume_id': '3c7b78db-230f-4c84-98a6-2e17f07510bc', 'name': 'volume-3c7b78db-230f-4c84-98a6-2e17f07510bc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '41952d7b-ce23-4e9b-8843-bbac1d3099c1', 'attached_at': '', 'detached_at': '', 'volume_id': '3c7b78db-230f-4c84-98a6-2e17f07510bc', 'serial': '3c7b78db-230f-4c84-98a6-2e17f07510bc'} {{(pid=63379) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1607.657760] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ef7f56e7-6b54-4e9f-b59f-290b32f364eb tempest-ServerActionsV293TestJSON-711435588 tempest-ServerActionsV293TestJSON-711435588-project-member] Lock "a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.850s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1607.661050] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cacd19c9-e8e6-459e-af23-2ed55512feef {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.667262] env[63379]: DEBUG nova.compute.manager [req-bc09be8d-1f6a-445b-8808-b6a3ec7a8c67 req-50b50206-bdcb-4a44-b6f0-44173efebe47 service nova] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Received event 
network-vif-plugged-93c003c1-3952-4c3f-ac43-f471addf4090 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1607.667262] env[63379]: DEBUG oslo_concurrency.lockutils [req-bc09be8d-1f6a-445b-8808-b6a3ec7a8c67 req-50b50206-bdcb-4a44-b6f0-44173efebe47 service nova] Acquiring lock "8b07ef47-3615-41a5-acfd-87c1ad43b4b9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1607.667375] env[63379]: DEBUG oslo_concurrency.lockutils [req-bc09be8d-1f6a-445b-8808-b6a3ec7a8c67 req-50b50206-bdcb-4a44-b6f0-44173efebe47 service nova] Lock "8b07ef47-3615-41a5-acfd-87c1ad43b4b9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1607.667833] env[63379]: DEBUG oslo_concurrency.lockutils [req-bc09be8d-1f6a-445b-8808-b6a3ec7a8c67 req-50b50206-bdcb-4a44-b6f0-44173efebe47 service nova] Lock "8b07ef47-3615-41a5-acfd-87c1ad43b4b9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1607.667833] env[63379]: DEBUG nova.compute.manager [req-bc09be8d-1f6a-445b-8808-b6a3ec7a8c67 req-50b50206-bdcb-4a44-b6f0-44173efebe47 service nova] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] No waiting events found dispatching network-vif-plugged-93c003c1-3952-4c3f-ac43-f471addf4090 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1607.667833] env[63379]: WARNING nova.compute.manager [req-bc09be8d-1f6a-445b-8808-b6a3ec7a8c67 req-50b50206-bdcb-4a44-b6f0-44173efebe47 service nova] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Received unexpected event network-vif-plugged-93c003c1-3952-4c3f-ac43-f471addf4090 for instance with vm_state building and task_state spawning. [ 1607.667974] env[63379]: DEBUG nova.compute.manager [req-bc09be8d-1f6a-445b-8808-b6a3ec7a8c67 req-50b50206-bdcb-4a44-b6f0-44173efebe47 service nova] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Received event network-changed-93c003c1-3952-4c3f-ac43-f471addf4090 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1607.668144] env[63379]: DEBUG nova.compute.manager [req-bc09be8d-1f6a-445b-8808-b6a3ec7a8c67 req-50b50206-bdcb-4a44-b6f0-44173efebe47 service nova] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Refreshing instance network info cache due to event network-changed-93c003c1-3952-4c3f-ac43-f471addf4090. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1607.668322] env[63379]: DEBUG oslo_concurrency.lockutils [req-bc09be8d-1f6a-445b-8808-b6a3ec7a8c67 req-50b50206-bdcb-4a44-b6f0-44173efebe47 service nova] Acquiring lock "refresh_cache-8b07ef47-3615-41a5-acfd-87c1ad43b4b9" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1607.668456] env[63379]: DEBUG oslo_concurrency.lockutils [req-bc09be8d-1f6a-445b-8808-b6a3ec7a8c67 req-50b50206-bdcb-4a44-b6f0-44173efebe47 service nova] Acquired lock "refresh_cache-8b07ef47-3615-41a5-acfd-87c1ad43b4b9" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1607.668613] env[63379]: DEBUG nova.network.neutron [req-bc09be8d-1f6a-445b-8808-b6a3ec7a8c67 req-50b50206-bdcb-4a44-b6f0-44173efebe47 service nova] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Refreshing network info cache for port 93c003c1-3952-4c3f-ac43-f471addf4090 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1607.680100] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Updating instance 'f082cdd7-228e-4100-b301-5af6daea9b36' progress to 83 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1607.697408] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c11d0bc1-2313-40aa-bfc2-c315ed7e2fcc tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1607.700777] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b351af3-70f3-47d5-80bb-5ccaa1e43654 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.703830] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2144e894-e35e-4038-b288-cf702d52d1aa {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.714229] env[63379]: DEBUG oslo_vmware.api [None req-fd53a111-d3e7-4980-8551-aefbe6955814 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779574, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.154945} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1607.715210] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd53a111-d3e7-4980-8551-aefbe6955814 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1607.715403] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-fd53a111-d3e7-4980-8551-aefbe6955814 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1607.715603] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-fd53a111-d3e7-4980-8551-aefbe6955814 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1607.715780] env[63379]: INFO nova.compute.manager [None req-fd53a111-d3e7-4980-8551-aefbe6955814 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Took 1.87 seconds to destroy the instance on the hypervisor. [ 1607.716033] env[63379]: DEBUG oslo.service.loopingcall [None req-fd53a111-d3e7-4980-8551-aefbe6955814 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1607.716348] env[63379]: DEBUG nova.compute.manager [-] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1607.716449] env[63379]: DEBUG nova.network.neutron [-] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1607.739325] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-01877a7d-de59-4ff2-b0b1-bca629d6551a tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Reconfiguring VM instance instance-00000016 to attach disk [datastore1] volume-3c7b78db-230f-4c84-98a6-2e17f07510bc/volume-3c7b78db-230f-4c84-98a6-2e17f07510bc.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1607.744606] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-90d958c6-595c-4582-aa79-c0439a644873 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.758201] env[63379]: DEBUG oslo_vmware.api [None req-c11d0bc1-2313-40aa-bfc2-c315ed7e2fcc tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Waiting for the task: (returnval){ [ 1607.758201] env[63379]: value = "task-1779575" [ 1607.758201] env[63379]: _type = "Task" [ 1607.758201] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1607.759031] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1607.759434] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1607.769144] env[63379]: DEBUG oslo_vmware.api [None req-01877a7d-de59-4ff2-b0b1-bca629d6551a tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Waiting for the task: (returnval){ [ 1607.769144] env[63379]: value = "task-1779576" [ 1607.769144] env[63379]: _type = "Task" [ 1607.769144] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1607.772727] env[63379]: DEBUG oslo_vmware.api [None req-c11d0bc1-2313-40aa-bfc2-c315ed7e2fcc tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Task: {'id': task-1779575, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.781821] env[63379]: DEBUG oslo_vmware.api [None req-01877a7d-de59-4ff2-b0b1-bca629d6551a tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': task-1779576, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.171247] env[63379]: DEBUG oslo_concurrency.lockutils [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquiring lock "refresh_cache-8b07ef47-3615-41a5-acfd-87c1ad43b4b9" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1608.171621] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6ff500ce-a441-48c0-99b5-b45d99dca87e tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Acquiring lock "941ac23c-6aa9-4ed1-840a-326423b7cbc0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1608.171830] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6ff500ce-a441-48c0-99b5-b45d99dca87e tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Lock "941ac23c-6aa9-4ed1-840a-326423b7cbc0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1608.172040] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6ff500ce-a441-48c0-99b5-b45d99dca87e tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Acquiring lock "941ac23c-6aa9-4ed1-840a-326423b7cbc0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1608.172230] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6ff500ce-a441-48c0-99b5-b45d99dca87e tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Lock "941ac23c-6aa9-4ed1-840a-326423b7cbc0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1608.172397] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6ff500ce-a441-48c0-99b5-b45d99dca87e tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Lock "941ac23c-6aa9-4ed1-840a-326423b7cbc0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1608.175154] env[63379]: INFO nova.compute.manager [None req-6ff500ce-a441-48c0-99b5-b45d99dca87e tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Terminating instance [ 1608.177327] env[63379]: DEBUG nova.compute.manager [None req-6ff500ce-a441-48c0-99b5-b45d99dca87e tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1608.177531] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6ff500ce-a441-48c0-99b5-b45d99dca87e tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1608.178567] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb09bc52-d8a3-4200-abb9-75e7a4778113 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.193142] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ff500ce-a441-48c0-99b5-b45d99dca87e tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1608.193142] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2f800089-ee3f-41ea-8a70-be8a61b5dcea {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.200173] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1608.200525] env[63379]: DEBUG oslo_vmware.api [None req-6ff500ce-a441-48c0-99b5-b45d99dca87e tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Waiting for the task: (returnval){ [ 1608.200525] env[63379]: value = "task-1779577" [ 1608.200525] env[63379]: _type = "Task" [ 1608.200525] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1608.201016] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a33a5dc1-9727-4127-bde5-2aaad577b9f2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.210247] env[63379]: DEBUG oslo_vmware.api [None req-6ff500ce-a441-48c0-99b5-b45d99dca87e tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Task: {'id': task-1779577, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.212605] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bb5771c-7ef1-4524-9adc-4a61c22c6b7c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.215521] env[63379]: DEBUG oslo_vmware.api [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Waiting for the task: (returnval){ [ 1608.215521] env[63379]: value = "task-1779578" [ 1608.215521] env[63379]: _type = "Task" [ 1608.215521] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1608.222152] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68cd7ed1-5331-4919-923b-eed7f6967b0c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.227956] env[63379]: DEBUG oslo_vmware.api [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779578, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.255498] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fa69691-c371-44e4-a06e-4da5f814ac09 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.266272] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-298e5192-9aa7-455e-b9a8-aa2a621107ad {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.284871] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1608.284994] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Starting heal instance info cache {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9974}} [ 1608.286338] env[63379]: DEBUG oslo_vmware.api [None req-c11d0bc1-2313-40aa-bfc2-c315ed7e2fcc tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Task: {'id': task-1779575, 'name': PowerOffVM_Task, 'duration_secs': 0.204761} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1608.296581] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c11d0bc1-2313-40aa-bfc2-c315ed7e2fcc tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1608.296788] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c11d0bc1-2313-40aa-bfc2-c315ed7e2fcc tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1608.297305] env[63379]: DEBUG nova.compute.provider_tree [None req-a13046a8-d32e-49a7-a044-02ed2cd3a9cd tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1608.301484] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4568d90d-6a8e-40ae-b1ab-6f5188b2489c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.303410] env[63379]: DEBUG oslo_vmware.api [None req-01877a7d-de59-4ff2-b0b1-bca629d6551a tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': task-1779576, 'name': ReconfigVM_Task, 'duration_secs': 0.429112} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1608.305017] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-01877a7d-de59-4ff2-b0b1-bca629d6551a tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Reconfigured VM instance instance-00000016 to attach disk [datastore1] volume-3c7b78db-230f-4c84-98a6-2e17f07510bc/volume-3c7b78db-230f-4c84-98a6-2e17f07510bc.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1608.309725] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a9acd20d-2568-4197-a3f3-ee37fa494aab {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.322394] env[63379]: DEBUG nova.network.neutron [req-bc09be8d-1f6a-445b-8808-b6a3ec7a8c67 req-50b50206-bdcb-4a44-b6f0-44173efebe47 service nova] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1608.330779] env[63379]: DEBUG oslo_vmware.api [None req-01877a7d-de59-4ff2-b0b1-bca629d6551a tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Waiting for the task: (returnval){ [ 1608.330779] env[63379]: value = "task-1779580" [ 1608.330779] env[63379]: _type = "Task" [ 1608.330779] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1608.340211] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c11d0bc1-2313-40aa-bfc2-c315ed7e2fcc tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1608.340533] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c11d0bc1-2313-40aa-bfc2-c315ed7e2fcc tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1608.340736] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-c11d0bc1-2313-40aa-bfc2-c315ed7e2fcc tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Deleting the datastore file [datastore1] ac596f08-86a3-42e0-86e6-41a173fe868f {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1608.344726] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c1389e6f-1337-494a-8834-b35c01ac214a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.348103] env[63379]: DEBUG oslo_vmware.api [None req-01877a7d-de59-4ff2-b0b1-bca629d6551a tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': task-1779580, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.352685] env[63379]: DEBUG oslo_vmware.api [None req-c11d0bc1-2313-40aa-bfc2-c315ed7e2fcc tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Waiting for the task: (returnval){ [ 1608.352685] env[63379]: value = "task-1779581" [ 1608.352685] env[63379]: _type = "Task" [ 1608.352685] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1608.362964] env[63379]: DEBUG oslo_vmware.api [None req-c11d0bc1-2313-40aa-bfc2-c315ed7e2fcc tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Task: {'id': task-1779581, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.374659] env[63379]: DEBUG oslo_concurrency.lockutils [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Acquiring lock "5aad86f8-0b3b-43ca-982b-c670e3411c01" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1608.374914] env[63379]: DEBUG oslo_concurrency.lockutils [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Lock "5aad86f8-0b3b-43ca-982b-c670e3411c01" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1608.462131] env[63379]: DEBUG nova.network.neutron [req-bc09be8d-1f6a-445b-8808-b6a3ec7a8c67 req-50b50206-bdcb-4a44-b6f0-44173efebe47 service nova] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1608.622702] env[63379]: DEBUG nova.network.neutron [-] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1608.712539] env[63379]: DEBUG oslo_vmware.api [None req-6ff500ce-a441-48c0-99b5-b45d99dca87e tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Task: {'id': task-1779577, 'name': PowerOffVM_Task, 'duration_secs': 0.202775} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1608.713231] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ff500ce-a441-48c0-99b5-b45d99dca87e tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1608.713592] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6ff500ce-a441-48c0-99b5-b45d99dca87e tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1608.713937] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4e014163-b464-433c-a36b-07fdc1e9d5b5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.723729] env[63379]: DEBUG oslo_vmware.api [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779578, 'name': PowerOnVM_Task, 'duration_secs': 0.427233} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1608.724175] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1608.729019] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-32126942-421f-439d-abbb-2316d7b472b3 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Updating instance 'f082cdd7-228e-4100-b301-5af6daea9b36' progress to 100 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1608.806314] env[63379]: DEBUG nova.scheduler.client.report [None req-a13046a8-d32e-49a7-a044-02ed2cd3a9cd tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1608.844288] env[63379]: DEBUG oslo_vmware.api [None req-01877a7d-de59-4ff2-b0b1-bca629d6551a tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': task-1779580, 'name': ReconfigVM_Task, 'duration_secs': 0.157493} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1608.845978] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-01877a7d-de59-4ff2-b0b1-bca629d6551a tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369389', 'volume_id': '3c7b78db-230f-4c84-98a6-2e17f07510bc', 'name': 'volume-3c7b78db-230f-4c84-98a6-2e17f07510bc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '41952d7b-ce23-4e9b-8843-bbac1d3099c1', 'attached_at': '', 'detached_at': '', 'volume_id': '3c7b78db-230f-4c84-98a6-2e17f07510bc', 'serial': '3c7b78db-230f-4c84-98a6-2e17f07510bc'} {{(pid=63379) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1608.866939] env[63379]: DEBUG oslo_vmware.api [None req-c11d0bc1-2313-40aa-bfc2-c315ed7e2fcc tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Task: {'id': task-1779581, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.159651} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1608.867425] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-c11d0bc1-2313-40aa-bfc2-c315ed7e2fcc tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1608.867722] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c11d0bc1-2313-40aa-bfc2-c315ed7e2fcc tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1608.868017] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c11d0bc1-2313-40aa-bfc2-c315ed7e2fcc tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1608.868299] env[63379]: INFO nova.compute.manager [None req-c11d0bc1-2313-40aa-bfc2-c315ed7e2fcc tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Took 2.01 seconds to destroy the instance on the hypervisor. [ 1608.868635] env[63379]: DEBUG oslo.service.loopingcall [None req-c11d0bc1-2313-40aa-bfc2-c315ed7e2fcc tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1608.869197] env[63379]: DEBUG nova.compute.manager [-] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1608.869383] env[63379]: DEBUG nova.network.neutron [-] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1608.877807] env[63379]: DEBUG nova.compute.manager [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Starting instance... 
{{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1608.885021] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6ff500ce-a441-48c0-99b5-b45d99dca87e tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1608.885021] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6ff500ce-a441-48c0-99b5-b45d99dca87e tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1608.885021] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ff500ce-a441-48c0-99b5-b45d99dca87e tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Deleting the datastore file [datastore1] 941ac23c-6aa9-4ed1-840a-326423b7cbc0 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1608.885021] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-44efc440-f71e-410d-8e78-f1c665fd993f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.887785] env[63379]: DEBUG nova.network.neutron [-] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1608.898302] env[63379]: DEBUG oslo_vmware.api [None req-6ff500ce-a441-48c0-99b5-b45d99dca87e tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Waiting for the task: (returnval){ [ 1608.898302] env[63379]: value = "task-1779583" [ 1608.898302] env[63379]: _type = "Task" [ 1608.898302] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1608.908626] env[63379]: DEBUG oslo_vmware.api [None req-6ff500ce-a441-48c0-99b5-b45d99dca87e tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Task: {'id': task-1779583, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.964124] env[63379]: DEBUG oslo_concurrency.lockutils [req-bc09be8d-1f6a-445b-8808-b6a3ec7a8c67 req-50b50206-bdcb-4a44-b6f0-44173efebe47 service nova] Releasing lock "refresh_cache-8b07ef47-3615-41a5-acfd-87c1ad43b4b9" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1608.964233] env[63379]: DEBUG oslo_concurrency.lockutils [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquired lock "refresh_cache-8b07ef47-3615-41a5-acfd-87c1ad43b4b9" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1608.964461] env[63379]: DEBUG nova.network.neutron [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1609.124930] env[63379]: INFO nova.compute.manager [-] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Took 1.41 seconds to deallocate network for instance. [ 1609.319425] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a13046a8-d32e-49a7-a044-02ed2cd3a9cd tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.004s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1609.320308] env[63379]: DEBUG oslo_concurrency.lockutils [None req-48101c4a-957c-468f-97ce-d097b45d0130 tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.716s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1609.321522] env[63379]: DEBUG nova.objects.instance [None req-48101c4a-957c-468f-97ce-d097b45d0130 tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Lazy-loading 'resources' on Instance uuid aedff32b-b0c2-4a93-a2c6-349d26839cc4 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1609.341766] env[63379]: INFO nova.scheduler.client.report [None req-a13046a8-d32e-49a7-a044-02ed2cd3a9cd tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Deleted allocations for instance d47be684-6cd8-45c6-8c6a-9a6db0390f97 [ 1609.345286] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "refresh_cache-aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1609.345286] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquired lock "refresh_cache-aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1609.347176] env[63379]: DEBUG nova.network.neutron [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 
aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Forcefully refreshing network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1609.392555] env[63379]: DEBUG nova.network.neutron [-] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1609.396680] env[63379]: DEBUG oslo_concurrency.lockutils [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1609.409981] env[63379]: DEBUG oslo_vmware.api [None req-6ff500ce-a441-48c0-99b5-b45d99dca87e tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Task: {'id': task-1779583, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.472061} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1609.411232] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ff500ce-a441-48c0-99b5-b45d99dca87e tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1609.411462] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6ff500ce-a441-48c0-99b5-b45d99dca87e tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1609.411677] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6ff500ce-a441-48c0-99b5-b45d99dca87e tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1609.411884] env[63379]: INFO nova.compute.manager [None req-6ff500ce-a441-48c0-99b5-b45d99dca87e tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Took 1.23 seconds to destroy the instance on the hypervisor. [ 1609.412177] env[63379]: DEBUG oslo.service.loopingcall [None req-6ff500ce-a441-48c0-99b5-b45d99dca87e tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1609.413247] env[63379]: DEBUG nova.compute.manager [req-da198a7d-17f5-4b4a-910f-3798d2b67d4b req-2456386f-c59a-4b76-b7c0-c2ae6154ba1f service nova] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Received event network-vif-deleted-6a39414d-cc4f-4a85-997b-d633aec0bcef {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1609.413846] env[63379]: DEBUG nova.compute.manager [-] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1609.413976] env[63379]: DEBUG nova.network.neutron [-] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1609.532517] env[63379]: DEBUG nova.network.neutron [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1609.634930] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fd53a111-d3e7-4980-8551-aefbe6955814 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1609.857254] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a13046a8-d32e-49a7-a044-02ed2cd3a9cd tempest-VolumesAssistedSnapshotsTest-122783605 tempest-VolumesAssistedSnapshotsTest-122783605-project-member] Lock "d47be684-6cd8-45c6-8c6a-9a6db0390f97" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.462s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1609.865201] env[63379]: DEBUG nova.network.neutron [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Updating instance_info_cache with network_info: [{"id": "93c003c1-3952-4c3f-ac43-f471addf4090", "address": "fa:16:3e:06:58:49", "network": {"id": "0f1c71c4-9a40-4d5f-9ce7-b2e38109b1f5", "bridge": "br-int", "label": "tempest-ImagesTestJSON-969152574-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "767980ba969142098ccbdf031f6e62a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0636c3f6-fcb7-4954-ab07-c5cd0dee37b0", "external-id": "nsx-vlan-transportzone-857", "segmentation_id": 857, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93c003c1-39", "ovs_interfaceid": "93c003c1-3952-4c3f-ac43-f471addf4090", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1609.892742] env[63379]: DEBUG nova.objects.instance [None req-01877a7d-de59-4ff2-b0b1-bca629d6551a tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Lazy-loading 'flavor' on Instance uuid 41952d7b-ce23-4e9b-8843-bbac1d3099c1 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1609.894246] env[63379]: INFO nova.compute.manager [-] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Took 1.02 seconds to deallocate network for instance. [ 1610.343509] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29343954-f15c-41d1-9cae-50d8dc70c568 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.352480] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fe783a5-b931-4c00-9b14-340fafe498fa {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.385511] env[63379]: DEBUG oslo_concurrency.lockutils [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Releasing lock "refresh_cache-8b07ef47-3615-41a5-acfd-87c1ad43b4b9" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1610.385511] env[63379]: DEBUG nova.compute.manager [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Instance network_info: |[{"id": "93c003c1-3952-4c3f-ac43-f471addf4090", "address": "fa:16:3e:06:58:49", "network": {"id": "0f1c71c4-9a40-4d5f-9ce7-b2e38109b1f5", "bridge": "br-int", "label": "tempest-ImagesTestJSON-969152574-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "767980ba969142098ccbdf031f6e62a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0636c3f6-fcb7-4954-ab07-c5cd0dee37b0", "external-id": "nsx-vlan-transportzone-857", "segmentation_id": 857, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93c003c1-39", "ovs_interfaceid": "93c003c1-3952-4c3f-ac43-f471addf4090", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1610.385511] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:06:58:49', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0636c3f6-fcb7-4954-ab07-c5cd0dee37b0', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': '93c003c1-3952-4c3f-ac43-f471addf4090', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1610.392501] env[63379]: DEBUG oslo.service.loopingcall [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1610.393302] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ab4f8a4-e65c-4df0-97ee-8c3436ee2183 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.396165] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1610.396479] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6734842f-69e1-40a3-8745-e9c3563784ff {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.413763] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c11d0bc1-2313-40aa-bfc2-c315ed7e2fcc tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1610.416737] env[63379]: DEBUG oslo_concurrency.lockutils [None req-01877a7d-de59-4ff2-b0b1-bca629d6551a tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Lock "41952d7b-ce23-4e9b-8843-bbac1d3099c1" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.413s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1610.419495] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f4f31e3-445d-4202-90f6-c7ef0f399909 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.426854] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1610.426854] env[63379]: value = "task-1779584" [ 1610.426854] env[63379]: _type = "Task" [ 1610.426854] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.439974] env[63379]: DEBUG nova.compute.provider_tree [None req-48101c4a-957c-468f-97ce-d097b45d0130 tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1610.446862] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779584, 'name': CreateVM_Task} progress is 10%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.668213] env[63379]: DEBUG nova.network.neutron [-] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1610.772333] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ef34da7e-d139-4900-aae1-1283596b8737 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Acquiring lock "41952d7b-ce23-4e9b-8843-bbac1d3099c1" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1610.772608] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ef34da7e-d139-4900-aae1-1283596b8737 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Lock "41952d7b-ce23-4e9b-8843-bbac1d3099c1" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1610.943996] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779584, 'name': CreateVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.943996] env[63379]: DEBUG nova.scheduler.client.report [None req-48101c4a-957c-468f-97ce-d097b45d0130 tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1610.975540] env[63379]: DEBUG nova.network.neutron [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Updating instance_info_cache with network_info: [{"id": "e034314c-72fb-4187-9c6b-1cd2e95aa97a", "address": "fa:16:3e:d2:92:4e", "network": {"id": "55f3848c-4d4f-4c83-a3e6-bc7a6f7af3ce", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.250", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eb95d75934bc4912a35f709406a98a65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape034314c-72", "ovs_interfaceid": "e034314c-72fb-4187-9c6b-1cd2e95aa97a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": 
false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1611.170563] env[63379]: INFO nova.compute.manager [-] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Took 1.76 seconds to deallocate network for instance. [ 1611.275818] env[63379]: INFO nova.compute.manager [None req-ef34da7e-d139-4900-aae1-1283596b8737 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Detaching volume 3c7b78db-230f-4c84-98a6-2e17f07510bc [ 1611.315064] env[63379]: INFO nova.virt.block_device [None req-ef34da7e-d139-4900-aae1-1283596b8737 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Attempting to driver detach volume 3c7b78db-230f-4c84-98a6-2e17f07510bc from mountpoint /dev/sdb [ 1611.315064] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef34da7e-d139-4900-aae1-1283596b8737 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Volume detach. Driver type: vmdk {{(pid=63379) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1611.315064] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef34da7e-d139-4900-aae1-1283596b8737 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369389', 'volume_id': '3c7b78db-230f-4c84-98a6-2e17f07510bc', 'name': 'volume-3c7b78db-230f-4c84-98a6-2e17f07510bc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '41952d7b-ce23-4e9b-8843-bbac1d3099c1', 'attached_at': '', 'detached_at': '', 'volume_id': '3c7b78db-230f-4c84-98a6-2e17f07510bc', 'serial': '3c7b78db-230f-4c84-98a6-2e17f07510bc'} {{(pid=63379) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1611.315064] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e20e6029-8fde-4f7d-bcdd-41b6e1a0222c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.340100] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a53fe09f-9363-4bd3-9a72-bf6d1f869c2b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.349022] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d77f839-7974-47cb-a20e-4867fcac778c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.369932] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6d786dc-9b06-4156-90d5-38a525796317 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.386459] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef34da7e-d139-4900-aae1-1283596b8737 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] The volume has not been displaced from its 
original location: [datastore1] volume-3c7b78db-230f-4c84-98a6-2e17f07510bc/volume-3c7b78db-230f-4c84-98a6-2e17f07510bc.vmdk. No consolidation needed. {{(pid=63379) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1611.392610] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef34da7e-d139-4900-aae1-1283596b8737 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Reconfiguring VM instance instance-00000016 to detach disk 2001 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1611.394025] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f73b34ab-400b-40a2-b216-2d8412162d91 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.415172] env[63379]: DEBUG oslo_vmware.api [None req-ef34da7e-d139-4900-aae1-1283596b8737 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Waiting for the task: (returnval){ [ 1611.415172] env[63379]: value = "task-1779585" [ 1611.415172] env[63379]: _type = "Task" [ 1611.415172] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.425244] env[63379]: DEBUG oslo_vmware.api [None req-ef34da7e-d139-4900-aae1-1283596b8737 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': task-1779585, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.437566] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779584, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.457967] env[63379]: DEBUG oslo_concurrency.lockutils [None req-48101c4a-957c-468f-97ce-d097b45d0130 tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.142s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1611.460036] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a5b924b1-b37b-41be-a210-27cdf0187536 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.101s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1611.460364] env[63379]: DEBUG nova.objects.instance [None req-a5b924b1-b37b-41be-a210-27cdf0187536 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Lazy-loading 'resources' on Instance uuid 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1611.479153] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Releasing lock "refresh_cache-aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1611.479153] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Updated the network info_cache for instance {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10045}} [ 1611.480022] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1611.480849] env[63379]: DEBUG nova.compute.manager [req-cfea3c5e-44bd-40bc-8cca-a7f88aa738e7 req-6602b1a8-708b-4a7b-90f5-1a34f25ed97c service nova] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Received event network-vif-deleted-d2e80ecc-8309-4e64-b962-762c8535bf0a {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1611.482127] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1611.482766] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1611.483430] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1611.483430] env[63379]: DEBUG oslo_service.periodic_task [None 
req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1611.483430] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1611.483430] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63379) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10593}} [ 1611.483430] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager.update_available_resource {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1611.492539] env[63379]: INFO nova.scheduler.client.report [None req-48101c4a-957c-468f-97ce-d097b45d0130 tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Deleted allocations for instance aedff32b-b0c2-4a93-a2c6-349d26839cc4 [ 1611.678116] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6ff500ce-a441-48c0-99b5-b45d99dca87e tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1611.735041] env[63379]: DEBUG nova.network.neutron [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Port bbe843e8-9156-454e-8ba4-dae6bc31c8b2 binding to destination host cpu-1 is already ACTIVE {{(pid=63379) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1611.735385] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Acquiring lock "refresh_cache-f082cdd7-228e-4100-b301-5af6daea9b36" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1611.735558] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Acquired lock "refresh_cache-f082cdd7-228e-4100-b301-5af6daea9b36" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1611.735818] env[63379]: DEBUG nova.network.neutron [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1611.926090] env[63379]: DEBUG oslo_vmware.api [None req-ef34da7e-d139-4900-aae1-1283596b8737 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': task-1779585, 'name': ReconfigVM_Task, 
'duration_secs': 0.496012} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1611.926382] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef34da7e-d139-4900-aae1-1283596b8737 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Reconfigured VM instance instance-00000016 to detach disk 2001 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1611.931247] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-60114569-1d4c-4fee-8817-ec23951c967e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.949584] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779584, 'name': CreateVM_Task, 'duration_secs': 1.36458} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1611.951058] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1611.951452] env[63379]: DEBUG oslo_vmware.api [None req-ef34da7e-d139-4900-aae1-1283596b8737 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Waiting for the task: (returnval){ [ 1611.951452] env[63379]: value = "task-1779586" [ 1611.951452] env[63379]: _type = "Task" [ 1611.951452] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.952093] env[63379]: DEBUG oslo_concurrency.lockutils [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9c544a4-5a35-4c31-896a-05c58c561419" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1611.952267] env[63379]: DEBUG oslo_concurrency.lockutils [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9c544a4-5a35-4c31-896a-05c58c561419" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1611.952668] env[63379]: DEBUG oslo_concurrency.lockutils [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c9c544a4-5a35-4c31-896a-05c58c561419" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1611.952980] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c630b3dc-1bff-4a81-8603-063f7798563c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.964489] env[63379]: DEBUG oslo_vmware.api [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1611.964489] env[63379]: value = 
"session[526a9413-5212-9a2d-b527-6a96915ebc5a]52924769-a6fb-ab1d-8375-6288b540dd0a" [ 1611.964489] env[63379]: _type = "Task" [ 1611.964489] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.967735] env[63379]: DEBUG oslo_vmware.api [None req-ef34da7e-d139-4900-aae1-1283596b8737 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': task-1779586, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.978209] env[63379]: DEBUG oslo_concurrency.lockutils [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9c544a4-5a35-4c31-896a-05c58c561419" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1611.978493] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Processing image c9c544a4-5a35-4c31-896a-05c58c561419 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1611.978738] env[63379]: DEBUG oslo_concurrency.lockutils [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c9c544a4-5a35-4c31-896a-05c58c561419/c9c544a4-5a35-4c31-896a-05c58c561419.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1611.978893] env[63379]: DEBUG oslo_concurrency.lockutils [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c9c544a4-5a35-4c31-896a-05c58c561419/c9c544a4-5a35-4c31-896a-05c58c561419.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1611.979090] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1611.979364] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b34daa46-a816-43cc-a01a-019c5d861dba {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.986672] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1611.988105] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1611.988580] env[63379]: DEBUG 
nova.virt.vmwareapi.vmops [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1611.991587] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85b724a4-03c4-4e1f-8533-d4418027712e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.997123] env[63379]: DEBUG oslo_vmware.api [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1611.997123] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]524da72d-6f0b-6525-efc1-01627db92a18" [ 1611.997123] env[63379]: _type = "Task" [ 1611.997123] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1612.004401] env[63379]: DEBUG oslo_concurrency.lockutils [None req-48101c4a-957c-468f-97ce-d097b45d0130 tempest-AttachInterfacesUnderV243Test-452658908 tempest-AttachInterfacesUnderV243Test-452658908-project-member] Lock "aedff32b-b0c2-4a93-a2c6-349d26839cc4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.873s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1612.011672] env[63379]: DEBUG oslo_vmware.api [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]524da72d-6f0b-6525-efc1-01627db92a18, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.403445] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5a28244-8322-41da-a439-2aec0245da3e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.415231] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84e25e56-d3cb-43e8-9975-7719ca3003ea {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.451204] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deb14938-8069-4718-a03b-b1f569b0d47c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.467105] env[63379]: DEBUG oslo_vmware.api [None req-ef34da7e-d139-4900-aae1-1283596b8737 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': task-1779586, 'name': ReconfigVM_Task, 'duration_secs': 0.154636} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1612.468342] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13d0cd3a-20db-4b7a-abc3-95ce298dcd4d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.472292] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef34da7e-d139-4900-aae1-1283596b8737 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369389', 'volume_id': '3c7b78db-230f-4c84-98a6-2e17f07510bc', 'name': 'volume-3c7b78db-230f-4c84-98a6-2e17f07510bc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '41952d7b-ce23-4e9b-8843-bbac1d3099c1', 'attached_at': '', 'detached_at': '', 'volume_id': '3c7b78db-230f-4c84-98a6-2e17f07510bc', 'serial': '3c7b78db-230f-4c84-98a6-2e17f07510bc'} {{(pid=63379) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1612.484769] env[63379]: DEBUG nova.compute.provider_tree [None req-a5b924b1-b37b-41be-a210-27cdf0187536 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1612.509922] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Preparing fetch location {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1612.510202] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Fetch image to [datastore1] OSTACK_IMG_a0dfd090-ad03-4b7e-92e1-f1740a23ff97/OSTACK_IMG_a0dfd090-ad03-4b7e-92e1-f1740a23ff97.vmdk {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1612.510385] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Downloading stream optimized image c9c544a4-5a35-4c31-896a-05c58c561419 to [datastore1] OSTACK_IMG_a0dfd090-ad03-4b7e-92e1-f1740a23ff97/OSTACK_IMG_a0dfd090-ad03-4b7e-92e1-f1740a23ff97.vmdk on the data store datastore1 as vApp {{(pid=63379) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1612.510549] env[63379]: DEBUG nova.virt.vmwareapi.images [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Downloading image file data c9c544a4-5a35-4c31-896a-05c58c561419 to the ESX as VM named 'OSTACK_IMG_a0dfd090-ad03-4b7e-92e1-f1740a23ff97' {{(pid=63379) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1612.610306] env[63379]: DEBUG oslo_vmware.rw_handles [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 
tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1612.610306] env[63379]: value = "resgroup-9" [ 1612.610306] env[63379]: _type = "ResourcePool" [ 1612.610306] env[63379]: }. {{(pid=63379) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1612.610306] env[63379]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-60debdaa-ff68-422c-a42f-7ae6ab5fc773 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.631724] env[63379]: DEBUG oslo_vmware.rw_handles [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lease: (returnval){ [ 1612.631724] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5203d338-051c-68bd-e3b3-66a203bf5d6f" [ 1612.631724] env[63379]: _type = "HttpNfcLease" [ 1612.631724] env[63379]: } obtained for vApp import into resource pool (val){ [ 1612.631724] env[63379]: value = "resgroup-9" [ 1612.631724] env[63379]: _type = "ResourcePool" [ 1612.631724] env[63379]: }. {{(pid=63379) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1612.633307] env[63379]: DEBUG oslo_vmware.api [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the lease: (returnval){ [ 1612.633307] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5203d338-051c-68bd-e3b3-66a203bf5d6f" [ 1612.633307] env[63379]: _type = "HttpNfcLease" [ 1612.633307] env[63379]: } to be ready. {{(pid=63379) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1612.641246] env[63379]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1612.641246] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5203d338-051c-68bd-e3b3-66a203bf5d6f" [ 1612.641246] env[63379]: _type = "HttpNfcLease" [ 1612.641246] env[63379]: } is initializing. 
{{(pid=63379) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1612.682585] env[63379]: DEBUG nova.network.neutron [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Updating instance_info_cache with network_info: [{"id": "bbe843e8-9156-454e-8ba4-dae6bc31c8b2", "address": "fa:16:3e:0c:14:52", "network": {"id": "55f3848c-4d4f-4c83-a3e6-bc7a6f7af3ce", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.215", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eb95d75934bc4912a35f709406a98a65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbbe843e8-91", "ovs_interfaceid": "bbe843e8-9156-454e-8ba4-dae6bc31c8b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1612.990243] env[63379]: DEBUG nova.scheduler.client.report [None req-a5b924b1-b37b-41be-a210-27cdf0187536 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1613.032999] env[63379]: DEBUG nova.objects.instance [None req-ef34da7e-d139-4900-aae1-1283596b8737 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Lazy-loading 'flavor' on Instance uuid 41952d7b-ce23-4e9b-8843-bbac1d3099c1 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1613.143386] env[63379]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1613.143386] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5203d338-051c-68bd-e3b3-66a203bf5d6f" [ 1613.143386] env[63379]: _type = "HttpNfcLease" [ 1613.143386] env[63379]: } is ready. 
{{(pid=63379) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1613.143711] env[63379]: DEBUG oslo_vmware.rw_handles [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1613.143711] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5203d338-051c-68bd-e3b3-66a203bf5d6f" [ 1613.143711] env[63379]: _type = "HttpNfcLease" [ 1613.143711] env[63379]: }. {{(pid=63379) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1613.147628] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75e413de-5d7a-4918-81e5-1bec1d4d22ce {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.163925] env[63379]: DEBUG oslo_vmware.rw_handles [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52925427-512e-6706-ae5c-c514f7db4b28/disk-0.vmdk from lease info. {{(pid=63379) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1613.164143] env[63379]: DEBUG oslo_vmware.rw_handles [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52925427-512e-6706-ae5c-c514f7db4b28/disk-0.vmdk. {{(pid=63379) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1613.235361] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Releasing lock "refresh_cache-f082cdd7-228e-4100-b301-5af6daea9b36" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1613.243543] env[63379]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-3bbd6b02-3614-47b8-8181-adbb193fac38 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.497981] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a5b924b1-b37b-41be-a210-27cdf0187536 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.038s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1613.501409] env[63379]: DEBUG oslo_concurrency.lockutils [None req-da719b45-2989-4e67-af4a-4813f084d26d tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.840s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1613.501771] env[63379]: DEBUG nova.objects.instance [None req-da719b45-2989-4e67-af4a-4813f084d26d tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] 
Lazy-loading 'resources' on Instance uuid 6e022c9a-642b-4d96-8195-e56809bbd7b9 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1613.539312] env[63379]: INFO nova.scheduler.client.report [None req-a5b924b1-b37b-41be-a210-27cdf0187536 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Deleted allocations for instance 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1 [ 1613.741908] env[63379]: DEBUG nova.compute.manager [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=63379) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:898}} [ 1613.742171] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1613.902326] env[63379]: DEBUG oslo_vmware.rw_handles [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Completed reading data from the image iterator. {{(pid=63379) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1613.903121] env[63379]: DEBUG oslo_vmware.rw_handles [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52925427-512e-6706-ae5c-c514f7db4b28/disk-0.vmdk. {{(pid=63379) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1613.904097] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d81d380b-6753-4428-a3e5-f40fddc2cc98 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.913745] env[63379]: DEBUG oslo_vmware.rw_handles [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52925427-512e-6706-ae5c-c514f7db4b28/disk-0.vmdk is in state: ready. {{(pid=63379) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1613.913928] env[63379]: DEBUG oslo_vmware.rw_handles [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52925427-512e-6706-ae5c-c514f7db4b28/disk-0.vmdk. 
{{(pid=63379) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1613.914196] env[63379]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-de62397b-d3cb-4ce9-9243-d0a5a1d16727 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.045522] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ef34da7e-d139-4900-aae1-1283596b8737 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Lock "41952d7b-ce23-4e9b-8843-bbac1d3099c1" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.272s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1614.050523] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a5b924b1-b37b-41be-a210-27cdf0187536 tempest-ServersTestBootFromVolume-1966638433 tempest-ServersTestBootFromVolume-1966638433-project-member] Lock "15d19ce3-ea71-47ff-a738-9ba00b8dfcf1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.890s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1614.281904] env[63379]: DEBUG oslo_vmware.rw_handles [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52925427-512e-6706-ae5c-c514f7db4b28/disk-0.vmdk. {{(pid=63379) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1614.282255] env[63379]: INFO nova.virt.vmwareapi.images [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Downloaded image file data c9c544a4-5a35-4c31-896a-05c58c561419 [ 1614.283154] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f53c7aec-8894-47a9-ad3c-c79ca63dc705 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.306159] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-51244509-a5dd-43c0-b68c-8d037659cca3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.353827] env[63379]: INFO nova.virt.vmwareapi.images [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] The imported VM was unregistered [ 1614.356272] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Caching image {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1614.356601] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Creating directory with path [datastore1] devstack-image-cache_base/c9c544a4-5a35-4c31-896a-05c58c561419 {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 
1614.359498] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0cf956d4-4259-44a8-a09d-dc4366810de0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.380572] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Created directory with path [datastore1] devstack-image-cache_base/c9c544a4-5a35-4c31-896a-05c58c561419 {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1614.380824] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_a0dfd090-ad03-4b7e-92e1-f1740a23ff97/OSTACK_IMG_a0dfd090-ad03-4b7e-92e1-f1740a23ff97.vmdk to [datastore1] devstack-image-cache_base/c9c544a4-5a35-4c31-896a-05c58c561419/c9c544a4-5a35-4c31-896a-05c58c561419.vmdk. {{(pid=63379) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1614.381138] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-eefefa94-914b-414e-8e5f-80e2917f1bf0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.389327] env[63379]: DEBUG oslo_vmware.api [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1614.389327] env[63379]: value = "task-1779589" [ 1614.389327] env[63379]: _type = "Task" [ 1614.389327] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1614.403707] env[63379]: DEBUG oslo_vmware.api [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779589, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.526383] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6f2f6c3-6c56-4a73-be49-b9e363a89670 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.537057] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1df6574-348d-4b74-8a0e-7115ce3097a2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.575116] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce31fc94-9598-46b0-96ff-b2e53308e207 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.583562] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b0bdc45-8413-4893-bff5-697188c572af {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.600785] env[63379]: DEBUG nova.compute.provider_tree [None req-da719b45-2989-4e67-af4a-4813f084d26d tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1614.903016] env[63379]: DEBUG oslo_vmware.api [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779589, 'name': MoveVirtualDisk_Task} progress is 9%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.006256] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Acquiring lock "36681a38-7cfd-44cf-8b8f-1f4dfb613f4f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1615.006467] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Lock "36681a38-7cfd-44cf-8b8f-1f4dfb613f4f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1615.104784] env[63379]: DEBUG nova.scheduler.client.report [None req-da719b45-2989-4e67-af4a-4813f084d26d tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1615.412168] env[63379]: DEBUG oslo_vmware.api [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779589, 'name': MoveVirtualDisk_Task} progress is 29%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.511278] env[63379]: DEBUG nova.compute.manager [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Starting instance... 
{{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1615.613077] env[63379]: DEBUG oslo_concurrency.lockutils [None req-da719b45-2989-4e67-af4a-4813f084d26d tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.112s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1615.615824] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.407s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1615.617934] env[63379]: INFO nova.compute.claims [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1615.649719] env[63379]: INFO nova.scheduler.client.report [None req-da719b45-2989-4e67-af4a-4813f084d26d tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Deleted allocations for instance 6e022c9a-642b-4d96-8195-e56809bbd7b9 [ 1615.913703] env[63379]: DEBUG oslo_vmware.api [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779589, 'name': MoveVirtualDisk_Task} progress is 49%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.039134] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1616.159387] env[63379]: DEBUG oslo_concurrency.lockutils [None req-da719b45-2989-4e67-af4a-4813f084d26d tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Lock "6e022c9a-642b-4d96-8195-e56809bbd7b9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.953s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1616.410338] env[63379]: DEBUG oslo_vmware.api [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779589, 'name': MoveVirtualDisk_Task} progress is 69%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.914090] env[63379]: DEBUG oslo_vmware.api [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779589, 'name': MoveVirtualDisk_Task} progress is 91%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.187110] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15cb2b32-a266-4c14-860c-2236b50ef8b4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.194982] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3624fe6d-8de4-47b0-81aa-3a4fb6acb9b7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.225526] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b7ca7b1-66b0-481d-b496-779698ffa4da {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.233153] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41f44384-2775-4d2f-8bf5-c77deadb8c0f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.247077] env[63379]: DEBUG nova.compute.provider_tree [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1617.409841] env[63379]: DEBUG oslo_vmware.api [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779589, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.829623} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.410443] env[63379]: INFO nova.virt.vmwareapi.ds_util [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_a0dfd090-ad03-4b7e-92e1-f1740a23ff97/OSTACK_IMG_a0dfd090-ad03-4b7e-92e1-f1740a23ff97.vmdk to [datastore1] devstack-image-cache_base/c9c544a4-5a35-4c31-896a-05c58c561419/c9c544a4-5a35-4c31-896a-05c58c561419.vmdk. 
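The repeated "Task: {'id': task-1779589, 'name': MoveVirtualDisk_Task} progress is N%." records above come from a poll-until-done loop: the task state is re-read on an interval until vCenter reports success or error, at which point the elapsed duration is logged. A minimal standalone Python sketch of that control flow follows; TaskInfo and fetch_task_info are hypothetical stand-ins for the vSphere task read that oslo.vmware actually performs, so this illustrates the pattern in the log rather than the library's implementation.

# Sketch only: hypothetical helpers standing in for the real vCenter task read.
import time
from dataclasses import dataclass
from typing import Callable, Optional


@dataclass
class TaskInfo:
    state: str                  # "running", "success", or "error"
    progress: int               # percent complete, as logged above
    error: Optional[str] = None


def wait_for_task(fetch_task_info: Callable[[], TaskInfo],
                  interval: float = 0.5) -> TaskInfo:
    """Poll a long-running task, logging progress until it finishes."""
    start = time.monotonic()
    while True:
        info = fetch_task_info()
        if info.state == "success":
            # Mirrors the "'duration_secs': ...} completed successfully." records.
            print(f"completed successfully in {time.monotonic() - start:.6f}s")
            return info
        if info.state == "error":
            raise RuntimeError(f"task failed: {info.error}")
        print(f"progress is {info.progress}%.")
        time.sleep(interval)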
[ 1617.410641] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Cleaning up location [datastore1] OSTACK_IMG_a0dfd090-ad03-4b7e-92e1-f1740a23ff97 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1617.410929] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_a0dfd090-ad03-4b7e-92e1-f1740a23ff97 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1617.411085] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0db760b9-4360-424c-8ad3-fdc9b158d6a6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.418382] env[63379]: DEBUG oslo_vmware.api [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1617.418382] env[63379]: value = "task-1779590" [ 1617.418382] env[63379]: _type = "Task" [ 1617.418382] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.426857] env[63379]: DEBUG oslo_vmware.api [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779590, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.750667] env[63379]: DEBUG nova.scheduler.client.report [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1617.929266] env[63379]: DEBUG oslo_vmware.api [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779590, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.098555} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.929704] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1617.929704] env[63379]: DEBUG oslo_concurrency.lockutils [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c9c544a4-5a35-4c31-896a-05c58c561419/c9c544a4-5a35-4c31-896a-05c58c561419.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1617.930047] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c9c544a4-5a35-4c31-896a-05c58c561419/c9c544a4-5a35-4c31-896a-05c58c561419.vmdk to [datastore1] 8b07ef47-3615-41a5-acfd-87c1ad43b4b9/8b07ef47-3615-41a5-acfd-87c1ad43b4b9.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1617.930319] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-af1953d9-c1ef-47e2-9cf5-a3c5a50ccac8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.940216] env[63379]: DEBUG oslo_vmware.api [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1617.940216] env[63379]: value = "task-1779591" [ 1617.940216] env[63379]: _type = "Task" [ 1617.940216] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.951463] env[63379]: DEBUG oslo_vmware.api [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779591, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.256395] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.640s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1618.259052] env[63379]: DEBUG nova.compute.manager [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1618.262642] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.052s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1618.264746] env[63379]: INFO nova.compute.claims [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1618.457199] env[63379]: DEBUG oslo_vmware.api [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779591, 'name': CopyVirtualDisk_Task} progress is 9%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.488936] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f33e9264-22c2-494a-97d8-891c2fcad412 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Acquiring lock "04234ba7-24a3-48e5-9f62-6f4dddd0054a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1618.489311] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f33e9264-22c2-494a-97d8-891c2fcad412 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Lock "04234ba7-24a3-48e5-9f62-6f4dddd0054a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1618.489582] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f33e9264-22c2-494a-97d8-891c2fcad412 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Acquiring lock "04234ba7-24a3-48e5-9f62-6f4dddd0054a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1618.489858] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f33e9264-22c2-494a-97d8-891c2fcad412 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Lock "04234ba7-24a3-48e5-9f62-6f4dddd0054a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1618.490082] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f33e9264-22c2-494a-97d8-891c2fcad412 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Lock "04234ba7-24a3-48e5-9f62-6f4dddd0054a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1618.494025] env[63379]: INFO nova.compute.manager [None 
req-f33e9264-22c2-494a-97d8-891c2fcad412 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Terminating instance [ 1618.495486] env[63379]: DEBUG nova.compute.manager [None req-f33e9264-22c2-494a-97d8-891c2fcad412 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1618.495678] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f33e9264-22c2-494a-97d8-891c2fcad412 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1618.496690] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e87e63a-189d-4436-9528-d88f5bcb8675 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.506060] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f33e9264-22c2-494a-97d8-891c2fcad412 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1618.506339] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2231b75f-f9bf-41f2-b158-77868c632c84 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.513400] env[63379]: DEBUG oslo_vmware.api [None req-f33e9264-22c2-494a-97d8-891c2fcad412 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Waiting for the task: (returnval){ [ 1618.513400] env[63379]: value = "task-1779592" [ 1618.513400] env[63379]: _type = "Task" [ 1618.513400] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1618.524211] env[63379]: DEBUG oslo_vmware.api [None req-f33e9264-22c2-494a-97d8-891c2fcad412 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': task-1779592, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.769985] env[63379]: DEBUG nova.compute.utils [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1618.776844] env[63379]: DEBUG nova.compute.manager [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1618.777100] env[63379]: DEBUG nova.network.neutron [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1618.893038] env[63379]: DEBUG nova.policy [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '87416db304754ced85d79d6d30ca2241', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ce15a519ec5744feb0731439b2534fc0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1618.956074] env[63379]: DEBUG oslo_vmware.api [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779591, 'name': CopyVirtualDisk_Task} progress is 26%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.023828] env[63379]: DEBUG oslo_vmware.api [None req-f33e9264-22c2-494a-97d8-891c2fcad412 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': task-1779592, 'name': PowerOffVM_Task, 'duration_secs': 0.403655} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1619.024148] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f33e9264-22c2-494a-97d8-891c2fcad412 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1619.024559] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f33e9264-22c2-494a-97d8-891c2fcad412 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1619.024652] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4a132dba-06ee-40d0-840c-5f54c7336a25 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.124401] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f33e9264-22c2-494a-97d8-891c2fcad412 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1619.124630] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f33e9264-22c2-494a-97d8-891c2fcad412 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1619.124805] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-f33e9264-22c2-494a-97d8-891c2fcad412 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Deleting the datastore file [datastore1] 04234ba7-24a3-48e5-9f62-6f4dddd0054a {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1619.125143] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8306f5b8-d746-4116-ab4b-dd235a56e07f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.132970] env[63379]: DEBUG oslo_vmware.api [None req-f33e9264-22c2-494a-97d8-891c2fcad412 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Waiting for the task: (returnval){ [ 1619.132970] env[63379]: value = "task-1779594" [ 1619.132970] env[63379]: _type = "Task" [ 1619.132970] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.142324] env[63379]: DEBUG oslo_vmware.api [None req-f33e9264-22c2-494a-97d8-891c2fcad412 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': task-1779594, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.275920] env[63379]: DEBUG nova.compute.manager [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1619.459606] env[63379]: DEBUG oslo_vmware.api [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779591, 'name': CopyVirtualDisk_Task} progress is 49%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.555067] env[63379]: DEBUG nova.network.neutron [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Successfully created port: 2d279162-72d1-4378-b83d-c80b2815f680 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1619.649648] env[63379]: DEBUG oslo_vmware.api [None req-f33e9264-22c2-494a-97d8-891c2fcad412 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': task-1779594, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.795893] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa4ffb31-e29a-4dea-b8f8-8dfab2cb1664 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.806570] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45ed8678-7bfc-497b-9ba5-7b39b98b11be {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.842927] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-359fbccf-f0cf-44cf-ae04-2b4457d0aa45 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.854418] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c23089c3-fff8-44e9-969f-36a4cc81f8f9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.873024] env[63379]: DEBUG nova.compute.provider_tree [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1619.953715] env[63379]: DEBUG oslo_vmware.api [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779591, 'name': CopyVirtualDisk_Task} progress is 69%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.150894] env[63379]: DEBUG oslo_vmware.api [None req-f33e9264-22c2-494a-97d8-891c2fcad412 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': task-1779594, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.304225] env[63379]: DEBUG nova.compute.manager [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1620.343718] env[63379]: DEBUG nova.virt.hardware [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1620.345252] env[63379]: DEBUG nova.virt.hardware [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1620.345459] env[63379]: DEBUG nova.virt.hardware [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1620.345763] env[63379]: DEBUG nova.virt.hardware [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1620.345859] env[63379]: DEBUG nova.virt.hardware [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1620.346028] env[63379]: DEBUG nova.virt.hardware [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1620.346264] env[63379]: DEBUG nova.virt.hardware [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1620.346430] env[63379]: DEBUG nova.virt.hardware [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1620.346607] env[63379]: DEBUG nova.virt.hardware [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1620.346800] env[63379]: DEBUG nova.virt.hardware [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1620.346984] env[63379]: DEBUG nova.virt.hardware [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1620.347940] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9323354-a7b4-4e0a-8c72-3e1c78c19dba {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.361565] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3862a019-99dc-4d86-af95-a16a3779ace9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.378296] env[63379]: DEBUG nova.scheduler.client.report [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1620.464662] env[63379]: DEBUG oslo_vmware.api [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779591, 'name': CopyVirtualDisk_Task} progress is 88%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.649341] env[63379]: DEBUG oslo_vmware.api [None req-f33e9264-22c2-494a-97d8-891c2fcad412 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': task-1779594, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.809381] env[63379]: DEBUG oslo_concurrency.lockutils [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Acquiring lock "f087b3ac-13e2-4e55-a3ce-5e6bd3379239" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1620.809381] env[63379]: DEBUG oslo_concurrency.lockutils [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Lock "f087b3ac-13e2-4e55-a3ce-5e6bd3379239" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1620.885601] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.623s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1620.886178] env[63379]: DEBUG nova.compute.manager [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1620.888789] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.572s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1620.890560] env[63379]: INFO nova.compute.claims [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1620.957325] env[63379]: DEBUG oslo_vmware.api [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779591, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.772972} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1620.957692] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c9c544a4-5a35-4c31-896a-05c58c561419/c9c544a4-5a35-4c31-896a-05c58c561419.vmdk to [datastore1] 8b07ef47-3615-41a5-acfd-87c1ad43b4b9/8b07ef47-3615-41a5-acfd-87c1ad43b4b9.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1620.958698] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b9dd52d-3f96-4c9b-a593-4e4dadeab387 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.984804] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Reconfiguring VM instance instance-0000003d to attach disk [datastore1] 8b07ef47-3615-41a5-acfd-87c1ad43b4b9/8b07ef47-3615-41a5-acfd-87c1ad43b4b9.vmdk or device None with type streamOptimized {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1620.985072] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f51072b7-0973-4381-80e1-3a22ffd06c79 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.006121] env[63379]: DEBUG oslo_vmware.api [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1621.006121] env[63379]: value = "task-1779595" [ 1621.006121] env[63379]: _type = "Task" [ 1621.006121] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.014600] env[63379]: DEBUG oslo_vmware.api [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779595, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.149204] env[63379]: DEBUG oslo_vmware.api [None req-f33e9264-22c2-494a-97d8-891c2fcad412 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Task: {'id': task-1779594, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.596835} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.149204] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-f33e9264-22c2-494a-97d8-891c2fcad412 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1621.149204] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f33e9264-22c2-494a-97d8-891c2fcad412 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1621.149204] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f33e9264-22c2-494a-97d8-891c2fcad412 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1621.149204] env[63379]: INFO nova.compute.manager [None req-f33e9264-22c2-494a-97d8-891c2fcad412 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Took 2.65 seconds to destroy the instance on the hypervisor. [ 1621.149204] env[63379]: DEBUG oslo.service.loopingcall [None req-f33e9264-22c2-494a-97d8-891c2fcad412 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1621.149204] env[63379]: DEBUG nova.compute.manager [-] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1621.149204] env[63379]: DEBUG nova.network.neutron [-] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1621.311328] env[63379]: DEBUG nova.compute.manager [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1621.398948] env[63379]: DEBUG nova.compute.utils [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1621.400483] env[63379]: DEBUG nova.compute.manager [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1621.400483] env[63379]: DEBUG nova.network.neutron [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1621.460258] env[63379]: DEBUG nova.policy [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '06eb1221300e4d969a2c7fc92d8dc3e2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c3562bb229474ba7aa3dae98def05260', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1621.499175] env[63379]: DEBUG nova.compute.manager [req-40521472-c0a9-4f33-8d72-513d1d5b909f req-7889d882-6b41-478c-92d2-de09afe2a472 service nova] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Received event network-vif-deleted-1c6c710f-163e-4747-8489-53e8fdf2cf1f {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1621.499380] env[63379]: INFO nova.compute.manager [req-40521472-c0a9-4f33-8d72-513d1d5b909f req-7889d882-6b41-478c-92d2-de09afe2a472 service nova] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Neutron deleted interface 1c6c710f-163e-4747-8489-53e8fdf2cf1f; detaching it from the instance and deleting it from the info cache [ 1621.499551] env[63379]: DEBUG nova.network.neutron [req-40521472-c0a9-4f33-8d72-513d1d5b909f req-7889d882-6b41-478c-92d2-de09afe2a472 service nova] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1621.520757] env[63379]: DEBUG oslo_vmware.api [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779595, 'name': ReconfigVM_Task, 'duration_secs': 0.293587} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.521821] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Reconfigured VM instance instance-0000003d to attach disk [datastore1] 8b07ef47-3615-41a5-acfd-87c1ad43b4b9/8b07ef47-3615-41a5-acfd-87c1ad43b4b9.vmdk or device None with type streamOptimized {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1621.522484] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1cd8d4c7-583f-4ecb-8213-6f0d5edb97af {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.530878] env[63379]: DEBUG oslo_vmware.api [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1621.530878] env[63379]: value = "task-1779596" [ 1621.530878] env[63379]: _type = "Task" [ 1621.530878] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.545904] env[63379]: DEBUG oslo_vmware.api [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779596, 'name': Rename_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.697501] env[63379]: DEBUG nova.compute.manager [req-49f4e5e7-d185-43f4-b8bb-00d92c31c387 req-3a23d6f9-f99e-4abb-bf47-2a2b336a7777 service nova] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Received event network-vif-plugged-2d279162-72d1-4378-b83d-c80b2815f680 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1621.698418] env[63379]: DEBUG oslo_concurrency.lockutils [req-49f4e5e7-d185-43f4-b8bb-00d92c31c387 req-3a23d6f9-f99e-4abb-bf47-2a2b336a7777 service nova] Acquiring lock "19a41941-0679-4971-8a44-c95b13f5c294-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1621.698758] env[63379]: DEBUG oslo_concurrency.lockutils [req-49f4e5e7-d185-43f4-b8bb-00d92c31c387 req-3a23d6f9-f99e-4abb-bf47-2a2b336a7777 service nova] Lock "19a41941-0679-4971-8a44-c95b13f5c294-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1621.698998] env[63379]: DEBUG oslo_concurrency.lockutils [req-49f4e5e7-d185-43f4-b8bb-00d92c31c387 req-3a23d6f9-f99e-4abb-bf47-2a2b336a7777 service nova] Lock "19a41941-0679-4971-8a44-c95b13f5c294-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1621.699574] env[63379]: DEBUG nova.compute.manager [req-49f4e5e7-d185-43f4-b8bb-00d92c31c387 req-3a23d6f9-f99e-4abb-bf47-2a2b336a7777 service nova] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] No waiting events found dispatching network-vif-plugged-2d279162-72d1-4378-b83d-c80b2815f680 
{{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1621.699858] env[63379]: WARNING nova.compute.manager [req-49f4e5e7-d185-43f4-b8bb-00d92c31c387 req-3a23d6f9-f99e-4abb-bf47-2a2b336a7777 service nova] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Received unexpected event network-vif-plugged-2d279162-72d1-4378-b83d-c80b2815f680 for instance with vm_state building and task_state spawning. [ 1621.836469] env[63379]: DEBUG oslo_concurrency.lockutils [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1621.904866] env[63379]: DEBUG nova.compute.manager [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1621.962493] env[63379]: DEBUG nova.network.neutron [-] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1622.006336] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d5dd01f2-5dde-4751-bf24-8679a5f51117 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.018218] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de1c60c2-3cce-4977-8bcf-dc54bf0758f4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.029479] env[63379]: DEBUG nova.network.neutron [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Successfully updated port: 2d279162-72d1-4378-b83d-c80b2815f680 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1622.041764] env[63379]: DEBUG oslo_vmware.api [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779596, 'name': Rename_Task, 'duration_secs': 0.142715} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1622.042578] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1622.055416] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1144f8b8-4501-46ad-9a00-0ba0175ce9d6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.057296] env[63379]: DEBUG nova.compute.manager [req-40521472-c0a9-4f33-8d72-513d1d5b909f req-7889d882-6b41-478c-92d2-de09afe2a472 service nova] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Detach interface failed, port_id=1c6c710f-163e-4747-8489-53e8fdf2cf1f, reason: Instance 04234ba7-24a3-48e5-9f62-6f4dddd0054a could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 1622.065823] env[63379]: DEBUG oslo_vmware.api [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1622.065823] env[63379]: value = "task-1779597" [ 1622.065823] env[63379]: _type = "Task" [ 1622.065823] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1622.078376] env[63379]: DEBUG oslo_vmware.api [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779597, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.143038] env[63379]: DEBUG nova.network.neutron [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Successfully created port: 9e2aaa43-4ac9-490a-a951-3521757945cd {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1622.405528] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47fcf4c3-cb0c-4455-a217-72344ce8d8dd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.421699] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15f1584d-dfff-4f7d-b35b-3888423df81c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.467567] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72ffd5f4-5fc1-4c4f-9a9f-079af985da6f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.471832] env[63379]: INFO nova.compute.manager [-] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Took 1.32 seconds to deallocate network for instance. 
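The oslo_concurrency.lockutils records throughout this section ("Acquiring lock ...", "acquired ... :: waited 24.052s", '"released" ... :: held 2.618s') capture three moments per named critical section: when a caller requested the lock, when it actually obtained it, and when it released it. Below is a minimal standard-library analogue of that bookkeeping, assuming one in-process threading.Lock per lock name; the real helper also offers inter-process file locks and other modes not modelled here.

# Sketch only: in-process bookkeeping analogous to the lockutils wait/held logging.
import threading
import time
from collections import defaultdict
from contextlib import contextmanager

_locks = defaultdict(threading.Lock)   # one lock object per lock name


@contextmanager
def timed_lock(name: str, owner: str):
    print(f'Acquiring lock "{name}" by "{owner}"')
    t_request = time.monotonic()
    with _locks[name]:
        t_acquired = time.monotonic()
        print(f'Lock "{name}" acquired by "{owner}" :: waited {t_acquired - t_request:.3f}s')
        try:
            yield
        finally:
            held = time.monotonic() - t_acquired
            print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')


# Example: serialize a resource claim the way the "compute_resources" entries show.
with timed_lock("compute_resources", "ResourceTracker.instance_claim"):
    pass  # claim CPU/RAM/disk for the instance inside the critical section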
[ 1622.480694] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dc0e0e9-4eae-4509-a161-1d70594b2ae0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.498758] env[63379]: DEBUG nova.compute.provider_tree [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1622.534322] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquiring lock "refresh_cache-19a41941-0679-4971-8a44-c95b13f5c294" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1622.534403] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquired lock "refresh_cache-19a41941-0679-4971-8a44-c95b13f5c294" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1622.534587] env[63379]: DEBUG nova.network.neutron [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1622.579360] env[63379]: DEBUG oslo_vmware.api [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779597, 'name': PowerOnVM_Task, 'duration_secs': 0.499068} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1622.580272] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1622.580272] env[63379]: INFO nova.compute.manager [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Took 16.35 seconds to spawn the instance on the hypervisor. 
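The inventory payloads logged for provider cf478c89-515f-4372-b90f-4868ab56e978 ("Inventory has not changed for provider ... based on inventory data: {...}") are what the resource tracker reports to Placement. Placement's schedulable capacity for each resource class is (total - reserved) * allocation_ratio, while max_unit caps what a single allocation may request, so the logged figures work out as in the short sketch below (values copied from the logged payload).

# Figures copied from the "Inventory has not changed for provider ..." records above.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "max_unit": 16,    "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "max_unit": 65530, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "max_unit": 162,   "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: schedulable capacity {capacity:g}, per-instance cap {inv['max_unit']}")

# VCPU: schedulable capacity 192, per-instance cap 16
# MEMORY_MB: schedulable capacity 196078, per-instance cap 65530
# DISK_GB: schedulable capacity 400, per-instance cap 162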
[ 1622.580272] env[63379]: DEBUG nova.compute.manager [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1622.582993] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37449c78-6d2e-4698-ad5f-7bfbdbe35bed {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.926604] env[63379]: DEBUG nova.compute.manager [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1622.967034] env[63379]: DEBUG nova.virt.hardware [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1622.967034] env[63379]: DEBUG nova.virt.hardware [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1622.967034] env[63379]: DEBUG nova.virt.hardware [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1622.967034] env[63379]: DEBUG nova.virt.hardware [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1622.967034] env[63379]: DEBUG nova.virt.hardware [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1622.967034] env[63379]: DEBUG nova.virt.hardware [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1622.967034] env[63379]: DEBUG nova.virt.hardware [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1622.967034] env[63379]: DEBUG nova.virt.hardware [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1622.967034] env[63379]: DEBUG nova.virt.hardware [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1622.967444] env[63379]: DEBUG nova.virt.hardware [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1622.967809] env[63379]: DEBUG nova.virt.hardware [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1622.968739] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4c5a1f6-3bfa-46c0-a327-1543a29d616e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.977825] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16f94ea7-b281-4d4b-93a9-195b461feaa0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.982694] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f33e9264-22c2-494a-97d8-891c2fcad412 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1623.003044] env[63379]: DEBUG nova.scheduler.client.report [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1623.096197] env[63379]: DEBUG 
nova.network.neutron [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1623.103529] env[63379]: INFO nova.compute.manager [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Took 43.06 seconds to build instance. [ 1623.357693] env[63379]: DEBUG nova.network.neutron [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Updating instance_info_cache with network_info: [{"id": "2d279162-72d1-4378-b83d-c80b2815f680", "address": "fa:16:3e:8c:45:f4", "network": {"id": "3a5c4f8e-5c7c-4623-90f8-f1b83e5b35f8", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-709139332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce15a519ec5744feb0731439b2534fc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d279162-72", "ovs_interfaceid": "2d279162-72d1-4378-b83d-c80b2815f680", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1623.506770] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.618s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1623.507121] env[63379]: DEBUG nova.compute.manager [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1623.509640] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.263s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1623.511081] env[63379]: INFO nova.compute.claims [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1623.606481] env[63379]: DEBUG oslo_concurrency.lockutils [None req-37fd8c28-828a-4d19-90fd-3201c3d0cd36 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "8b07ef47-3615-41a5-acfd-87c1ad43b4b9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.574s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1623.773432] env[63379]: DEBUG nova.compute.manager [req-ea5ae247-4672-4784-a73c-fe6ed52167aa req-ad1ad3b2-e51d-4b6f-bfcd-31293fca9b1c service nova] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Received event network-changed-2d279162-72d1-4378-b83d-c80b2815f680 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1623.773722] env[63379]: DEBUG nova.compute.manager [req-ea5ae247-4672-4784-a73c-fe6ed52167aa req-ad1ad3b2-e51d-4b6f-bfcd-31293fca9b1c service nova] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Refreshing instance network info cache due to event network-changed-2d279162-72d1-4378-b83d-c80b2815f680. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1623.774238] env[63379]: DEBUG oslo_concurrency.lockutils [req-ea5ae247-4672-4784-a73c-fe6ed52167aa req-ad1ad3b2-e51d-4b6f-bfcd-31293fca9b1c service nova] Acquiring lock "refresh_cache-19a41941-0679-4971-8a44-c95b13f5c294" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1623.860634] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Releasing lock "refresh_cache-19a41941-0679-4971-8a44-c95b13f5c294" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1623.860960] env[63379]: DEBUG nova.compute.manager [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Instance network_info: |[{"id": "2d279162-72d1-4378-b83d-c80b2815f680", "address": "fa:16:3e:8c:45:f4", "network": {"id": "3a5c4f8e-5c7c-4623-90f8-f1b83e5b35f8", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-709139332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce15a519ec5744feb0731439b2534fc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d279162-72", "ovs_interfaceid": "2d279162-72d1-4378-b83d-c80b2815f680", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1623.861294] env[63379]: DEBUG oslo_concurrency.lockutils [req-ea5ae247-4672-4784-a73c-fe6ed52167aa req-ad1ad3b2-e51d-4b6f-bfcd-31293fca9b1c service nova] Acquired lock "refresh_cache-19a41941-0679-4971-8a44-c95b13f5c294" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1623.861489] env[63379]: DEBUG nova.network.neutron [req-ea5ae247-4672-4784-a73c-fe6ed52167aa req-ad1ad3b2-e51d-4b6f-bfcd-31293fca9b1c service nova] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Refreshing network info cache for port 2d279162-72d1-4378-b83d-c80b2815f680 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1623.862684] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8c:45:f4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '822050c7-1845-485d-b87e-73778d21c33c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': 
True}, 'iface_id': '2d279162-72d1-4378-b83d-c80b2815f680', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1623.870883] env[63379]: DEBUG oslo.service.loopingcall [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1623.873882] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1623.874606] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5f9eaa38-9754-4bb8-a236-817d4f00514f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.901364] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1623.901364] env[63379]: value = "task-1779598" [ 1623.901364] env[63379]: _type = "Task" [ 1623.901364] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1623.910649] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779598, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.016073] env[63379]: DEBUG nova.compute.utils [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1624.017547] env[63379]: DEBUG nova.compute.manager [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1624.017710] env[63379]: DEBUG nova.network.neutron [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1624.075633] env[63379]: DEBUG nova.policy [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4a09fcc05b7d4239bcd13389bb41ebf4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f6552f9956224ba5a0a01328da741242', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1624.206292] env[63379]: DEBUG nova.network.neutron [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Successfully updated port: 9e2aaa43-4ac9-490a-a951-3521757945cd {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1624.311998] env[63379]: DEBUG nova.network.neutron [req-ea5ae247-4672-4784-a73c-fe6ed52167aa req-ad1ad3b2-e51d-4b6f-bfcd-31293fca9b1c service nova] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Updated VIF entry in instance network info cache for port 2d279162-72d1-4378-b83d-c80b2815f680. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1624.312405] env[63379]: DEBUG nova.network.neutron [req-ea5ae247-4672-4784-a73c-fe6ed52167aa req-ad1ad3b2-e51d-4b6f-bfcd-31293fca9b1c service nova] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Updating instance_info_cache with network_info: [{"id": "2d279162-72d1-4378-b83d-c80b2815f680", "address": "fa:16:3e:8c:45:f4", "network": {"id": "3a5c4f8e-5c7c-4623-90f8-f1b83e5b35f8", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-709139332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce15a519ec5744feb0731439b2534fc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d279162-72", "ovs_interfaceid": "2d279162-72d1-4378-b83d-c80b2815f680", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1624.336955] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f53dd295-a5d0-475b-8108-b8e87d867814 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Acquiring lock "08465a2c-1ab6-4c53-9b12-3cd51c717b03" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1624.337282] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f53dd295-a5d0-475b-8108-b8e87d867814 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Lock "08465a2c-1ab6-4c53-9b12-3cd51c717b03" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1624.337486] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f53dd295-a5d0-475b-8108-b8e87d867814 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Acquiring lock "08465a2c-1ab6-4c53-9b12-3cd51c717b03-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1624.337673] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f53dd295-a5d0-475b-8108-b8e87d867814 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Lock "08465a2c-1ab6-4c53-9b12-3cd51c717b03-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1624.337852] env[63379]: DEBUG oslo_concurrency.lockutils [None 
req-f53dd295-a5d0-475b-8108-b8e87d867814 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Lock "08465a2c-1ab6-4c53-9b12-3cd51c717b03-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1624.342027] env[63379]: INFO nova.compute.manager [None req-f53dd295-a5d0-475b-8108-b8e87d867814 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Terminating instance [ 1624.344318] env[63379]: DEBUG nova.compute.manager [None req-f53dd295-a5d0-475b-8108-b8e87d867814 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1624.344498] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f53dd295-a5d0-475b-8108-b8e87d867814 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1624.345711] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16e6955e-5dd8-4eb2-a5e4-670707707626 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.353101] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f53dd295-a5d0-475b-8108-b8e87d867814 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1624.353364] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-907aa058-9783-4267-baf5-99399f446cfa {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.361604] env[63379]: DEBUG oslo_vmware.api [None req-f53dd295-a5d0-475b-8108-b8e87d867814 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Waiting for the task: (returnval){ [ 1624.361604] env[63379]: value = "task-1779599" [ 1624.361604] env[63379]: _type = "Task" [ 1624.361604] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1624.370674] env[63379]: DEBUG oslo_vmware.api [None req-f53dd295-a5d0-475b-8108-b8e87d867814 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Task: {'id': task-1779599, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.409993] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779598, 'name': CreateVM_Task, 'duration_secs': 0.337545} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1624.410472] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1624.411243] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1624.411392] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1624.411732] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1624.412008] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a722cf1-0ebc-4b00-aebf-cbfdd9b9af92 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.417044] env[63379]: DEBUG oslo_vmware.api [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1624.417044] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]523b7fa2-bada-0260-465d-582a9a0517c6" [ 1624.417044] env[63379]: _type = "Task" [ 1624.417044] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1624.425953] env[63379]: DEBUG oslo_vmware.api [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]523b7fa2-bada-0260-465d-582a9a0517c6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.493382] env[63379]: DEBUG nova.network.neutron [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Successfully created port: af03ac28-a066-4ffd-ac52-33d4596db87d {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1624.523252] env[63379]: DEBUG nova.compute.manager [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Start building block device mappings for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1624.543507] env[63379]: DEBUG oslo_concurrency.lockutils [None req-36a06ee9-ab71-4c32-b967-60f4dbe43cce tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquiring lock "8b07ef47-3615-41a5-acfd-87c1ad43b4b9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1624.545938] env[63379]: DEBUG oslo_concurrency.lockutils [None req-36a06ee9-ab71-4c32-b967-60f4dbe43cce tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "8b07ef47-3615-41a5-acfd-87c1ad43b4b9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1624.546420] env[63379]: DEBUG oslo_concurrency.lockutils [None req-36a06ee9-ab71-4c32-b967-60f4dbe43cce tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquiring lock "8b07ef47-3615-41a5-acfd-87c1ad43b4b9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1624.546420] env[63379]: DEBUG oslo_concurrency.lockutils [None req-36a06ee9-ab71-4c32-b967-60f4dbe43cce tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "8b07ef47-3615-41a5-acfd-87c1ad43b4b9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1624.546601] env[63379]: DEBUG oslo_concurrency.lockutils [None req-36a06ee9-ab71-4c32-b967-60f4dbe43cce tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "8b07ef47-3615-41a5-acfd-87c1ad43b4b9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1624.548655] env[63379]: INFO nova.compute.manager [None req-36a06ee9-ab71-4c32-b967-60f4dbe43cce tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Terminating instance [ 1624.550489] env[63379]: DEBUG nova.compute.manager [None req-36a06ee9-ab71-4c32-b967-60f4dbe43cce tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1624.550820] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-36a06ee9-ab71-4c32-b967-60f4dbe43cce tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1624.551647] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1421a72-e24c-4bce-8904-2a670838250d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.562379] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-36a06ee9-ab71-4c32-b967-60f4dbe43cce tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1624.562906] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9031f1f6-3179-4b82-a5c4-aca90a608ee7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.569118] env[63379]: DEBUG oslo_vmware.api [None req-36a06ee9-ab71-4c32-b967-60f4dbe43cce tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1624.569118] env[63379]: value = "task-1779600" [ 1624.569118] env[63379]: _type = "Task" [ 1624.569118] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1624.584474] env[63379]: DEBUG oslo_vmware.api [None req-36a06ee9-ab71-4c32-b967-60f4dbe43cce tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779600, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.709556] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Acquiring lock "refresh_cache-5c4ae6c6-538a-4724-ad77-340d9c60c24a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1624.709556] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Acquired lock "refresh_cache-5c4ae6c6-538a-4724-ad77-340d9c60c24a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1624.709556] env[63379]: DEBUG nova.network.neutron [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1624.818337] env[63379]: DEBUG oslo_concurrency.lockutils [req-ea5ae247-4672-4784-a73c-fe6ed52167aa req-ad1ad3b2-e51d-4b6f-bfcd-31293fca9b1c service nova] Releasing lock "refresh_cache-19a41941-0679-4971-8a44-c95b13f5c294" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1624.871913] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "510db409-0b4c-494a-8084-39ef3cd6c918" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1624.872186] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "510db409-0b4c-494a-8084-39ef3cd6c918" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1624.880961] env[63379]: DEBUG oslo_vmware.api [None req-f53dd295-a5d0-475b-8108-b8e87d867814 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Task: {'id': task-1779599, 'name': PowerOffVM_Task, 'duration_secs': 0.234906} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1624.880961] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f53dd295-a5d0-475b-8108-b8e87d867814 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1624.881298] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f53dd295-a5d0-475b-8108-b8e87d867814 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1624.881527] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6d88eac7-020d-4ba7-a26e-dca7b51e4c6a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.933881] env[63379]: DEBUG oslo_vmware.api [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]523b7fa2-bada-0260-465d-582a9a0517c6, 'name': SearchDatastore_Task, 'duration_secs': 0.009452} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1624.937447] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1624.937722] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1624.938284] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1624.938284] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1624.938389] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Creating directory with path 
[datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1624.938949] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bcedb175-9827-48b2-98da-9db5b6356037 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.949568] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1624.949817] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1624.950768] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2bff8e9-00b4-44d1-aa13-6a04f6b9e6da {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.957235] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f53dd295-a5d0-475b-8108-b8e87d867814 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1624.957453] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f53dd295-a5d0-475b-8108-b8e87d867814 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1624.957637] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-f53dd295-a5d0-475b-8108-b8e87d867814 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Deleting the datastore file [datastore1] 08465a2c-1ab6-4c53-9b12-3cd51c717b03 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1624.959231] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c9f74149-b667-4463-8f81-3cac28e7b0b6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.962259] env[63379]: DEBUG oslo_vmware.api [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1624.962259] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52713a49-76c9-136c-29ea-691966d040e9" [ 1624.962259] env[63379]: _type = "Task" [ 1624.962259] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1624.967681] env[63379]: DEBUG oslo_vmware.api [None req-f53dd295-a5d0-475b-8108-b8e87d867814 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Waiting for the task: (returnval){ [ 1624.967681] env[63379]: value = "task-1779602" [ 1624.967681] env[63379]: _type = "Task" [ 1624.967681] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1624.975424] env[63379]: DEBUG oslo_vmware.api [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52713a49-76c9-136c-29ea-691966d040e9, 'name': SearchDatastore_Task, 'duration_secs': 0.009167} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1624.979198] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-68e3397f-ff82-4056-8762-b255b514339f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.984791] env[63379]: DEBUG oslo_vmware.api [None req-f53dd295-a5d0-475b-8108-b8e87d867814 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Task: {'id': task-1779602, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.988217] env[63379]: DEBUG oslo_vmware.api [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1624.988217] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f7008a-0a71-49ac-5448-5817eeeb8a20" [ 1624.988217] env[63379]: _type = "Task" [ 1624.988217] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1625.000691] env[63379]: DEBUG oslo_vmware.api [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f7008a-0a71-49ac-5448-5817eeeb8a20, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.049223] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-736bc76a-084d-47b2-bd37-9961c50eafaa {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.057315] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99d0c345-2cb0-4265-a00b-7430a8aed2bb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.090881] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51d32312-2f89-4571-b139-287137223839 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.098616] env[63379]: DEBUG oslo_vmware.api [None req-36a06ee9-ab71-4c32-b967-60f4dbe43cce tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779600, 'name': PowerOffVM_Task, 'duration_secs': 0.29238} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1625.100619] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-36a06ee9-ab71-4c32-b967-60f4dbe43cce tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1625.100830] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-36a06ee9-ab71-4c32-b967-60f4dbe43cce tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1625.101142] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d456d978-043d-4e68-ae59-443008ef7d43 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.103437] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba0cf125-bfcc-48a2-8712-7ee41a76ca56 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.118370] env[63379]: DEBUG nova.compute.provider_tree [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1625.285446] env[63379]: DEBUG nova.network.neutron [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Instance cache missing network info. 
{{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1625.328318] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-36a06ee9-ab71-4c32-b967-60f4dbe43cce tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1625.328742] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-36a06ee9-ab71-4c32-b967-60f4dbe43cce tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1625.328986] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-36a06ee9-ab71-4c32-b967-60f4dbe43cce tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Deleting the datastore file [datastore1] 8b07ef47-3615-41a5-acfd-87c1ad43b4b9 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1625.329273] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-da84a358-4801-4061-bcb3-f2cc679878c5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.335220] env[63379]: DEBUG oslo_vmware.api [None req-36a06ee9-ab71-4c32-b967-60f4dbe43cce tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1625.335220] env[63379]: value = "task-1779604" [ 1625.335220] env[63379]: _type = "Task" [ 1625.335220] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1625.345597] env[63379]: DEBUG oslo_vmware.api [None req-36a06ee9-ab71-4c32-b967-60f4dbe43cce tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779604, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.374471] env[63379]: DEBUG nova.compute.manager [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Starting instance... 
{{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1625.464151] env[63379]: DEBUG nova.network.neutron [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Updating instance_info_cache with network_info: [{"id": "9e2aaa43-4ac9-490a-a951-3521757945cd", "address": "fa:16:3e:2b:4b:b8", "network": {"id": "832e4609-8371-4d4b-8cfc-8a38039d24b7", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1517956996-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "c3562bb229474ba7aa3dae98def05260", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f267bcdd-0daa-4337-9709-5fc060c267d8", "external-id": "nsx-vlan-transportzone-308", "segmentation_id": 308, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e2aaa43-4a", "ovs_interfaceid": "9e2aaa43-4ac9-490a-a951-3521757945cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1625.481414] env[63379]: DEBUG oslo_vmware.api [None req-f53dd295-a5d0-475b-8108-b8e87d867814 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Task: {'id': task-1779602, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141846} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1625.481735] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-f53dd295-a5d0-475b-8108-b8e87d867814 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1625.481936] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f53dd295-a5d0-475b-8108-b8e87d867814 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1625.482139] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f53dd295-a5d0-475b-8108-b8e87d867814 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1625.482324] env[63379]: INFO nova.compute.manager [None req-f53dd295-a5d0-475b-8108-b8e87d867814 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Took 1.14 seconds to destroy the instance on the hypervisor. 
[ 1625.482571] env[63379]: DEBUG oslo.service.loopingcall [None req-f53dd295-a5d0-475b-8108-b8e87d867814 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1625.484830] env[63379]: DEBUG nova.compute.manager [-] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1625.484952] env[63379]: DEBUG nova.network.neutron [-] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1625.503779] env[63379]: DEBUG oslo_vmware.api [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f7008a-0a71-49ac-5448-5817eeeb8a20, 'name': SearchDatastore_Task, 'duration_secs': 0.011446} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1625.504056] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1625.504391] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 19a41941-0679-4971-8a44-c95b13f5c294/19a41941-0679-4971-8a44-c95b13f5c294.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1625.504966] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d27389c3-6f23-423f-9797-7675212fc4c7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.511624] env[63379]: DEBUG oslo_vmware.api [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1625.511624] env[63379]: value = "task-1779605" [ 1625.511624] env[63379]: _type = "Task" [ 1625.511624] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1625.519728] env[63379]: DEBUG oslo_vmware.api [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779605, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.543398] env[63379]: DEBUG nova.compute.manager [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1625.571890] env[63379]: DEBUG nova.virt.hardware [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1625.573036] env[63379]: DEBUG nova.virt.hardware [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1625.573036] env[63379]: DEBUG nova.virt.hardware [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1625.573036] env[63379]: DEBUG nova.virt.hardware [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1625.573036] env[63379]: DEBUG nova.virt.hardware [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1625.573389] env[63379]: DEBUG nova.virt.hardware [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1625.573804] env[63379]: DEBUG nova.virt.hardware [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1625.574367] 
env[63379]: DEBUG nova.virt.hardware [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1625.574747] env[63379]: DEBUG nova.virt.hardware [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1625.577015] env[63379]: DEBUG nova.virt.hardware [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1625.577015] env[63379]: DEBUG nova.virt.hardware [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1625.577015] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75eef9e3-c7ef-47b6-8e57-9e7419087501 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.584945] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46399294-8ff0-46c6-8c57-bf4ae3e36bbb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.624023] env[63379]: DEBUG nova.scheduler.client.report [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1625.855865] env[63379]: DEBUG oslo_vmware.api [None req-36a06ee9-ab71-4c32-b967-60f4dbe43cce tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779604, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142369} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1625.856858] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-36a06ee9-ab71-4c32-b967-60f4dbe43cce tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1625.856858] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-36a06ee9-ab71-4c32-b967-60f4dbe43cce tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1625.857110] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-36a06ee9-ab71-4c32-b967-60f4dbe43cce tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1625.857459] env[63379]: INFO nova.compute.manager [None req-36a06ee9-ab71-4c32-b967-60f4dbe43cce tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Took 1.31 seconds to destroy the instance on the hypervisor. [ 1625.857890] env[63379]: DEBUG oslo.service.loopingcall [None req-36a06ee9-ab71-4c32-b967-60f4dbe43cce tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1625.858268] env[63379]: DEBUG nova.compute.manager [-] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1625.858717] env[63379]: DEBUG nova.network.neutron [-] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1625.909166] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1625.969413] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Releasing lock "refresh_cache-5c4ae6c6-538a-4724-ad77-340d9c60c24a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1625.969739] env[63379]: DEBUG nova.compute.manager [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Instance network_info: |[{"id": "9e2aaa43-4ac9-490a-a951-3521757945cd", "address": "fa:16:3e:2b:4b:b8", "network": {"id": "832e4609-8371-4d4b-8cfc-8a38039d24b7", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1517956996-network", "subnets": [{"cidr": 
"192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "c3562bb229474ba7aa3dae98def05260", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f267bcdd-0daa-4337-9709-5fc060c267d8", "external-id": "nsx-vlan-transportzone-308", "segmentation_id": 308, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e2aaa43-4a", "ovs_interfaceid": "9e2aaa43-4ac9-490a-a951-3521757945cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1625.970303] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2b:4b:b8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f267bcdd-0daa-4337-9709-5fc060c267d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9e2aaa43-4ac9-490a-a951-3521757945cd', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1625.980422] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Creating folder: Project (c3562bb229474ba7aa3dae98def05260). Parent ref: group-v369214. 
{{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1625.983316] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-703eeca5-71fe-4f47-acf2-098df66bea13 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.987091] env[63379]: DEBUG nova.compute.manager [req-b8c3beaf-e2a1-4a30-b27b-439524b5124e req-a79851b3-6796-44f3-8d10-152c478d6ecf service nova] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Received event network-vif-plugged-9e2aaa43-4ac9-490a-a951-3521757945cd {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1625.987491] env[63379]: DEBUG oslo_concurrency.lockutils [req-b8c3beaf-e2a1-4a30-b27b-439524b5124e req-a79851b3-6796-44f3-8d10-152c478d6ecf service nova] Acquiring lock "5c4ae6c6-538a-4724-ad77-340d9c60c24a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1625.987795] env[63379]: DEBUG oslo_concurrency.lockutils [req-b8c3beaf-e2a1-4a30-b27b-439524b5124e req-a79851b3-6796-44f3-8d10-152c478d6ecf service nova] Lock "5c4ae6c6-538a-4724-ad77-340d9c60c24a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1625.988070] env[63379]: DEBUG oslo_concurrency.lockutils [req-b8c3beaf-e2a1-4a30-b27b-439524b5124e req-a79851b3-6796-44f3-8d10-152c478d6ecf service nova] Lock "5c4ae6c6-538a-4724-ad77-340d9c60c24a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1625.988314] env[63379]: DEBUG nova.compute.manager [req-b8c3beaf-e2a1-4a30-b27b-439524b5124e req-a79851b3-6796-44f3-8d10-152c478d6ecf service nova] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] No waiting events found dispatching network-vif-plugged-9e2aaa43-4ac9-490a-a951-3521757945cd {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1625.988901] env[63379]: WARNING nova.compute.manager [req-b8c3beaf-e2a1-4a30-b27b-439524b5124e req-a79851b3-6796-44f3-8d10-152c478d6ecf service nova] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Received unexpected event network-vif-plugged-9e2aaa43-4ac9-490a-a951-3521757945cd for instance with vm_state building and task_state spawning. [ 1625.988901] env[63379]: DEBUG nova.compute.manager [req-b8c3beaf-e2a1-4a30-b27b-439524b5124e req-a79851b3-6796-44f3-8d10-152c478d6ecf service nova] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Received event network-changed-9e2aaa43-4ac9-490a-a951-3521757945cd {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1625.988901] env[63379]: DEBUG nova.compute.manager [req-b8c3beaf-e2a1-4a30-b27b-439524b5124e req-a79851b3-6796-44f3-8d10-152c478d6ecf service nova] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Refreshing instance network info cache due to event network-changed-9e2aaa43-4ac9-490a-a951-3521757945cd. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1625.989288] env[63379]: DEBUG oslo_concurrency.lockutils [req-b8c3beaf-e2a1-4a30-b27b-439524b5124e req-a79851b3-6796-44f3-8d10-152c478d6ecf service nova] Acquiring lock "refresh_cache-5c4ae6c6-538a-4724-ad77-340d9c60c24a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1625.989463] env[63379]: DEBUG oslo_concurrency.lockutils [req-b8c3beaf-e2a1-4a30-b27b-439524b5124e req-a79851b3-6796-44f3-8d10-152c478d6ecf service nova] Acquired lock "refresh_cache-5c4ae6c6-538a-4724-ad77-340d9c60c24a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1625.989604] env[63379]: DEBUG nova.network.neutron [req-b8c3beaf-e2a1-4a30-b27b-439524b5124e req-a79851b3-6796-44f3-8d10-152c478d6ecf service nova] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Refreshing network info cache for port 9e2aaa43-4ac9-490a-a951-3521757945cd {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1626.001510] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Created folder: Project (c3562bb229474ba7aa3dae98def05260) in parent group-v369214. [ 1626.001811] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Creating folder: Instances. Parent ref: group-v369393. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1626.002139] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a585c30b-e6e4-4f8e-b51a-9ab432b1980d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.011919] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Created folder: Instances in parent group-v369393. [ 1626.012225] env[63379]: DEBUG oslo.service.loopingcall [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1626.012579] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1626.015823] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-92567269-1fee-4ea9-8907-8448b40e5a57 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.045294] env[63379]: DEBUG oslo_vmware.api [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779605, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.488992} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1626.045294] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 19a41941-0679-4971-8a44-c95b13f5c294/19a41941-0679-4971-8a44-c95b13f5c294.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1626.045294] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1626.045536] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1626.045536] env[63379]: value = "task-1779608" [ 1626.045536] env[63379]: _type = "Task" [ 1626.045536] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.046192] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0eed7166-223c-494b-9593-0273d080e568 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.058783] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779608, 'name': CreateVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.061820] env[63379]: DEBUG oslo_vmware.api [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1626.061820] env[63379]: value = "task-1779609" [ 1626.061820] env[63379]: _type = "Task" [ 1626.061820] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.129757] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.620s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1626.130689] env[63379]: DEBUG nova.compute.manager [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1626.134217] env[63379]: DEBUG oslo_concurrency.lockutils [None req-29923f7c-e8b5-411c-bd11-cf155895c40e tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.023s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1626.134509] env[63379]: DEBUG nova.objects.instance [None req-29923f7c-e8b5-411c-bd11-cf155895c40e tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Lazy-loading 'resources' on Instance uuid 158fe346-93f5-422b-877a-8423547da58f {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1626.273205] env[63379]: DEBUG nova.compute.manager [req-9736d950-c6aa-47cb-b1ca-7efe416ddd03 req-68df8fc2-3a89-44e4-812a-45ef8ce0762f service nova] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Received event network-vif-plugged-af03ac28-a066-4ffd-ac52-33d4596db87d {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1626.273205] env[63379]: DEBUG oslo_concurrency.lockutils [req-9736d950-c6aa-47cb-b1ca-7efe416ddd03 req-68df8fc2-3a89-44e4-812a-45ef8ce0762f service nova] Acquiring lock "acc8aa2f-41a8-4f06-8227-a1bae9c93f44-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1626.273205] env[63379]: DEBUG oslo_concurrency.lockutils [req-9736d950-c6aa-47cb-b1ca-7efe416ddd03 req-68df8fc2-3a89-44e4-812a-45ef8ce0762f service nova] Lock "acc8aa2f-41a8-4f06-8227-a1bae9c93f44-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1626.273205] env[63379]: DEBUG oslo_concurrency.lockutils [req-9736d950-c6aa-47cb-b1ca-7efe416ddd03 req-68df8fc2-3a89-44e4-812a-45ef8ce0762f service nova] Lock "acc8aa2f-41a8-4f06-8227-a1bae9c93f44-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1626.273205] env[63379]: DEBUG nova.compute.manager [req-9736d950-c6aa-47cb-b1ca-7efe416ddd03 req-68df8fc2-3a89-44e4-812a-45ef8ce0762f service nova] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] No waiting events found dispatching network-vif-plugged-af03ac28-a066-4ffd-ac52-33d4596db87d {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1626.273205] env[63379]: WARNING nova.compute.manager [req-9736d950-c6aa-47cb-b1ca-7efe416ddd03 req-68df8fc2-3a89-44e4-812a-45ef8ce0762f service nova] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Received unexpected event network-vif-plugged-af03ac28-a066-4ffd-ac52-33d4596db87d for instance with vm_state building and task_state spawning. 
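The records above show the external-event handling path: on network-vif-plugged, the service takes the per-instance "-events" lock, looks for a registered waiter, and logs "Received unexpected event" when none exists because the instance is still spawning. The following is a minimal stand-alone sketch of that wait/dispatch pattern using only the standard library; the class and method names are illustrative assumptions, not Nova's actual InstanceEvents API.

import threading

class EventRegistry:
    """Track external events that a caller expects for an instance (illustrative only)."""

    def __init__(self):
        self._lock = threading.Lock()      # plays the role of the per-instance "-events" lock
        self._waiters = {}                 # (instance_uuid, event_name) -> threading.Event

    def prepare(self, instance_uuid, event_name):
        # Register interest before starting the operation that triggers the event
        # (e.g. before asking Neutron to plug a VIF).
        waiter = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = waiter
        return waiter

    def dispatch(self, instance_uuid, event_name):
        # Deliver an incoming external event: wake the waiter, or warn if nobody registered.
        with self._lock:
            waiter = self._waiters.pop((instance_uuid, event_name), None)
        if waiter is None:
            print(f"WARNING: unexpected event {event_name} for instance {instance_uuid}")
            return False
        waiter.set()
        return True

# Usage sketch: the spawning thread calls registry.prepare(...) and then waiter.wait(timeout=...),
# while the thread handling the notification calls registry.dispatch(...).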
[ 1626.340570] env[63379]: DEBUG nova.network.neutron [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Successfully updated port: af03ac28-a066-4ffd-ac52-33d4596db87d {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1626.560375] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779608, 'name': CreateVM_Task, 'duration_secs': 0.401293} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1626.560659] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1626.562409] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1626.562409] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1626.562672] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1626.563023] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5aed109-1de4-4abc-b4fa-cf141578f32d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.574161] env[63379]: DEBUG oslo_vmware.api [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Waiting for the task: (returnval){ [ 1626.574161] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52365a59-c0f8-2eec-0f1e-583bb9528230" [ 1626.574161] env[63379]: _type = "Task" [ 1626.574161] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.581615] env[63379]: DEBUG oslo_vmware.api [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779609, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085421} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1626.583185] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1626.584767] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45bf2fc8-19e9-40f3-9ed3-ab7de9124293 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.591631] env[63379]: DEBUG oslo_vmware.api [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52365a59-c0f8-2eec-0f1e-583bb9528230, 'name': SearchDatastore_Task, 'duration_secs': 0.011262} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1626.594884] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1626.594884] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1626.595215] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1626.595430] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1626.595693] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1626.608313] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ffb9896e-893f-4617-9df9-38a4a992b523 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.622916] env[63379]: DEBUG 
nova.virt.vmwareapi.volumeops [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Reconfiguring VM instance instance-0000003e to attach disk [datastore1] 19a41941-0679-4971-8a44-c95b13f5c294/19a41941-0679-4971-8a44-c95b13f5c294.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1626.623402] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-792272e8-00df-4ab7-935b-e2ca0461a7dc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.644910] env[63379]: DEBUG nova.compute.utils [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1626.651729] env[63379]: DEBUG nova.compute.manager [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Not allocating networking since 'none' was specified. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1626.651729] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1626.651729] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1626.653202] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64864680-a16d-48e3-b914-c93f64d9f858 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.657347] env[63379]: DEBUG oslo_vmware.api [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1626.657347] env[63379]: value = "task-1779610" [ 1626.657347] env[63379]: _type = "Task" [ 1626.657347] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.662163] env[63379]: DEBUG oslo_vmware.api [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Waiting for the task: (returnval){ [ 1626.662163] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e41ba0-688b-efb4-0a3b-135cf99c5184" [ 1626.662163] env[63379]: _type = "Task" [ 1626.662163] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.672234] env[63379]: DEBUG oslo_vmware.api [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779610, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.679808] env[63379]: DEBUG oslo_vmware.api [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e41ba0-688b-efb4-0a3b-135cf99c5184, 'name': SearchDatastore_Task, 'duration_secs': 0.008839} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1626.682430] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2d682c6-021a-4468-ad03-23632e905613 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.688216] env[63379]: DEBUG oslo_vmware.api [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Waiting for the task: (returnval){ [ 1626.688216] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52890761-c1ca-f4ad-f0a0-3dc13ac34c2d" [ 1626.688216] env[63379]: _type = "Task" [ 1626.688216] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.695429] env[63379]: DEBUG oslo_vmware.api [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52890761-c1ca-f4ad-f0a0-3dc13ac34c2d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.772032] env[63379]: DEBUG nova.network.neutron [req-b8c3beaf-e2a1-4a30-b27b-439524b5124e req-a79851b3-6796-44f3-8d10-152c478d6ecf service nova] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Updated VIF entry in instance network info cache for port 9e2aaa43-4ac9-490a-a951-3521757945cd. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1626.772455] env[63379]: DEBUG nova.network.neutron [req-b8c3beaf-e2a1-4a30-b27b-439524b5124e req-a79851b3-6796-44f3-8d10-152c478d6ecf service nova] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Updating instance_info_cache with network_info: [{"id": "9e2aaa43-4ac9-490a-a951-3521757945cd", "address": "fa:16:3e:2b:4b:b8", "network": {"id": "832e4609-8371-4d4b-8cfc-8a38039d24b7", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1517956996-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "c3562bb229474ba7aa3dae98def05260", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f267bcdd-0daa-4337-9709-5fc060c267d8", "external-id": "nsx-vlan-transportzone-308", "segmentation_id": 308, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e2aaa43-4a", "ovs_interfaceid": "9e2aaa43-4ac9-490a-a951-3521757945cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1626.842268] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquiring lock "refresh_cache-acc8aa2f-41a8-4f06-8227-a1bae9c93f44" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1626.842444] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquired lock "refresh_cache-acc8aa2f-41a8-4f06-8227-a1bae9c93f44" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1626.842607] env[63379]: DEBUG nova.network.neutron [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1627.085321] env[63379]: DEBUG nova.network.neutron [-] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1627.099534] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7229ade-7a04-48b3-ae9f-6340af4bb45a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.109829] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38ecc7d7-e897-4343-9d53-84e6701dc8ca {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.141263] env[63379]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e1dec8f-5f7f-4bd2-80f2-5bcd6e0035de {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.151663] env[63379]: DEBUG nova.network.neutron [-] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1627.151663] env[63379]: DEBUG nova.compute.manager [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1627.154577] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8364fac1-beb1-4966-bfae-b9ce03263c7c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.178611] env[63379]: DEBUG oslo_vmware.api [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779610, 'name': ReconfigVM_Task, 'duration_secs': 0.324285} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1627.178611] env[63379]: DEBUG nova.compute.provider_tree [None req-29923f7c-e8b5-411c-bd11-cf155895c40e tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1627.179581] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Reconfigured VM instance instance-0000003e to attach disk [datastore1] 19a41941-0679-4971-8a44-c95b13f5c294/19a41941-0679-4971-8a44-c95b13f5c294.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1627.180461] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f675e9ab-9afd-4504-8c02-a2ffa999ca22 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.188690] env[63379]: DEBUG oslo_vmware.api [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1627.188690] env[63379]: value = "task-1779611" [ 1627.188690] env[63379]: _type = "Task" [ 1627.188690] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1627.200907] env[63379]: DEBUG oslo_vmware.api [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52890761-c1ca-f4ad-f0a0-3dc13ac34c2d, 'name': SearchDatastore_Task, 'duration_secs': 0.009007} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1627.204240] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1627.204547] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 5c4ae6c6-538a-4724-ad77-340d9c60c24a/5c4ae6c6-538a-4724-ad77-340d9c60c24a.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1627.205053] env[63379]: DEBUG oslo_vmware.api [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779611, 'name': Rename_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.205281] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7935f29d-57ca-43d7-a416-6eea98573c3c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.212172] env[63379]: DEBUG oslo_vmware.api [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Waiting for the task: (returnval){ [ 1627.212172] env[63379]: value = "task-1779612" [ 1627.212172] env[63379]: _type = "Task" [ 1627.212172] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1627.224279] env[63379]: DEBUG oslo_vmware.api [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779612, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.276432] env[63379]: DEBUG oslo_concurrency.lockutils [req-b8c3beaf-e2a1-4a30-b27b-439524b5124e req-a79851b3-6796-44f3-8d10-152c478d6ecf service nova] Releasing lock "refresh_cache-5c4ae6c6-538a-4724-ad77-340d9c60c24a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1627.380125] env[63379]: DEBUG nova.network.neutron [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1627.545795] env[63379]: DEBUG nova.network.neutron [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Updating instance_info_cache with network_info: [{"id": "af03ac28-a066-4ffd-ac52-33d4596db87d", "address": "fa:16:3e:41:35:5c", "network": {"id": "37610d35-b1d3-4657-9542-9e3e955af5be", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-58653119-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6552f9956224ba5a0a01328da741242", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf03ac28-a0", "ovs_interfaceid": "af03ac28-a066-4ffd-ac52-33d4596db87d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1627.588542] env[63379]: INFO nova.compute.manager [-] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Took 1.73 seconds to deallocate network for instance. [ 1627.659630] env[63379]: INFO nova.compute.manager [-] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Took 2.17 seconds to deallocate network for instance. 
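Throughout this section the driver submits vCenter tasks (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, ReconfigVM_Task) and the log alternates between "Waiting for the task ... to complete", "progress is N%", and "completed successfully". The sketch below shows that generic poll-until-done loop in isolation; the task attributes used here (id, name, state, progress, result, error) are assumptions for illustration, not oslo.vmware's actual interface.

import time

def wait_for_vcenter_task(task, poll_interval=0.5, timeout=300):
    """Poll a task-like object until it finishes, mirroring the progress records above."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        if task.state == "success":
            return task.result
        if task.state == "error":
            raise RuntimeError(f"Task {task.id} failed: {task.error}")
        # Corresponds to the periodic "Task: {...} progress is N%" DEBUG lines.
        print(f"Task {task.id} ({task.name}) progress is {task.progress}%")
        time.sleep(poll_interval)
    raise TimeoutError(f"Task {task.id} did not complete within {timeout}s")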
[ 1627.682572] env[63379]: DEBUG nova.scheduler.client.report [None req-29923f7c-e8b5-411c-bd11-cf155895c40e tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1627.706156] env[63379]: DEBUG oslo_vmware.api [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779611, 'name': Rename_Task, 'duration_secs': 0.181413} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1627.706628] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1627.707017] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-eb1dbbd8-75df-4c70-9725-06eb45ff9755 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.720768] env[63379]: DEBUG oslo_vmware.api [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1627.720768] env[63379]: value = "task-1779613" [ 1627.720768] env[63379]: _type = "Task" [ 1627.720768] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1627.730559] env[63379]: DEBUG oslo_vmware.api [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779612, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.736588] env[63379]: DEBUG oslo_vmware.api [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779613, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.049545] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Releasing lock "refresh_cache-acc8aa2f-41a8-4f06-8227-a1bae9c93f44" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1628.049863] env[63379]: DEBUG nova.compute.manager [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Instance network_info: |[{"id": "af03ac28-a066-4ffd-ac52-33d4596db87d", "address": "fa:16:3e:41:35:5c", "network": {"id": "37610d35-b1d3-4657-9542-9e3e955af5be", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-58653119-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6552f9956224ba5a0a01328da741242", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf03ac28-a0", "ovs_interfaceid": "af03ac28-a066-4ffd-ac52-33d4596db87d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1628.050358] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:41:35:5c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e445fb59-822c-4d7d-943b-c8e3bbaca62e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'af03ac28-a066-4ffd-ac52-33d4596db87d', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1628.061267] env[63379]: DEBUG oslo.service.loopingcall [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1628.061982] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1628.062355] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-73ab49fc-a3c3-4cdf-968d-39197a9c9faa {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.085520] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1628.085520] env[63379]: value = "task-1779614" [ 1628.085520] env[63379]: _type = "Task" [ 1628.085520] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.094587] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779614, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.095915] env[63379]: DEBUG oslo_concurrency.lockutils [None req-36a06ee9-ab71-4c32-b967-60f4dbe43cce tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1628.128458] env[63379]: DEBUG nova.compute.manager [req-30fe439e-c833-4584-986d-7acd25c97862 req-bb874462-f327-44b9-b9be-0879dc5875b2 service nova] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Received event network-vif-deleted-93c003c1-3952-4c3f-ac43-f471addf4090 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1628.128741] env[63379]: DEBUG nova.compute.manager [req-30fe439e-c833-4584-986d-7acd25c97862 req-bb874462-f327-44b9-b9be-0879dc5875b2 service nova] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Received event network-vif-deleted-b54d5849-e50d-4f42-922f-70d18e44b988 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1628.168484] env[63379]: DEBUG nova.compute.manager [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1628.173918] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f53dd295-a5d0-475b-8108-b8e87d867814 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1628.190064] env[63379]: DEBUG oslo_concurrency.lockutils [None req-29923f7c-e8b5-411c-bd11-cf155895c40e tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.056s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1628.194835] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1fa4712c-5ce0-4a58-997b-6de8bc603b4e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.142s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1628.195434] env[63379]: DEBUG nova.objects.instance [None req-1fa4712c-5ce0-4a58-997b-6de8bc603b4e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Lazy-loading 'resources' on Instance uuid ee36cc5f-61a1-4e4f-9cae-670f5868d90c {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1628.206109] env[63379]: DEBUG nova.virt.hardware [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1628.206109] env[63379]: DEBUG nova.virt.hardware [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1628.206456] env[63379]: DEBUG nova.virt.hardware [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1628.206515] env[63379]: DEBUG nova.virt.hardware [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 
tempest-ServerShowV257Test-2050319880-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1628.206654] env[63379]: DEBUG nova.virt.hardware [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1628.206880] env[63379]: DEBUG nova.virt.hardware [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1628.207099] env[63379]: DEBUG nova.virt.hardware [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1628.207308] env[63379]: DEBUG nova.virt.hardware [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1628.207565] env[63379]: DEBUG nova.virt.hardware [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1628.207672] env[63379]: DEBUG nova.virt.hardware [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1628.207873] env[63379]: DEBUG nova.virt.hardware [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1628.208832] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-448ff30e-df05-4dfe-b012-10455a6e19e7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.221341] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bad993f2-0156-4fc7-bb18-e8fbe316c4ca {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.231022] env[63379]: INFO nova.scheduler.client.report [None req-29923f7c-e8b5-411c-bd11-cf155895c40e tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Deleted allocations for instance 158fe346-93f5-422b-877a-8423547da58f [ 1628.239776] env[63379]: DEBUG oslo_vmware.api [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 
tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779612, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.55684} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.250819] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 5c4ae6c6-538a-4724-ad77-340d9c60c24a/5c4ae6c6-538a-4724-ad77-340d9c60c24a.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1628.250819] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1628.251190] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Instance VIF info [] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1628.258084] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Creating folder: Project (19828a214eb54fb7b513cb680723a788). Parent ref: group-v369214. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1628.262373] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-467427fe-d970-4f2e-a3ea-246c173c0be9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.264506] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-82916976-c060-454f-95a6-4bc31777156f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.268543] env[63379]: DEBUG oslo_vmware.api [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779613, 'name': PowerOnVM_Task, 'duration_secs': 0.538728} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.268847] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1628.269095] env[63379]: INFO nova.compute.manager [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Took 7.97 seconds to spawn the instance on the hypervisor. 
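
[editor's note] The recurring trio of entries above ("Invoking Folder.CreateVM_Task", "Waiting for the task: ...", "Task: {...} progress is N%") is oslo.vmware's standard invoke-then-poll pattern: the driver submits a vSphere task through the API session and blocks in wait_for_task() while _poll_task logs progress. A minimal, hypothetical sketch of that pattern is below; the host, credentials, and the pre-resolved folder/resource-pool/config-spec objects are placeholders that do not appear in this log, and this is not Nova's actual vm_util code.

    from oslo_vmware import api


    def get_session(host, user, password):
        # Placeholder credentials; task_poll_interval controls how often the
        # "_poll_task ... progress is N%" DEBUG lines are emitted.
        return api.VMwareAPISession(host, user, password,
                                    api_retry_count=10,
                                    task_poll_interval=0.5)


    def create_vm(session, folder_ref, config_spec, res_pool_ref):
        # Submit Folder.CreateVM_Task and block until it finishes;
        # wait_for_task() raises on task error and returns TaskInfo on success.
        task = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                                  config=config_spec, pool=res_pool_ref)
        task_info = session.wait_for_task(task)
        return task_info.result  # managed object reference of the new VM

The same wait_for_task() call is what produces the "completed successfully" lines for the CopyVirtualDisk_Task, ExtendVirtualDisk_Task and PowerOnVM_Task entries elsewhere in this section.
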
[ 1628.269279] env[63379]: DEBUG nova.compute.manager [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1628.271539] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32bb4e92-24b8-4df7-8494-d2f55f4b88ac {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.278969] env[63379]: DEBUG oslo_vmware.api [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Waiting for the task: (returnval){ [ 1628.278969] env[63379]: value = "task-1779615" [ 1628.278969] env[63379]: _type = "Task" [ 1628.278969] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.284960] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Created folder: Project (19828a214eb54fb7b513cb680723a788) in parent group-v369214. [ 1628.285192] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Creating folder: Instances. Parent ref: group-v369397. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1628.285717] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7cdb7597-a360-4668-bfa9-5cc8677f0876 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.293426] env[63379]: DEBUG oslo_vmware.api [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779615, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.297065] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Created folder: Instances in parent group-v369397. [ 1628.297065] env[63379]: DEBUG oslo.service.loopingcall [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1628.297065] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1628.297065] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-126abd71-3ffc-447f-8d7b-e51ed41267aa {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.316458] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1628.316458] env[63379]: value = "task-1779618" [ 1628.316458] env[63379]: _type = "Task" [ 1628.316458] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.322543] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779618, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.345956] env[63379]: DEBUG nova.compute.manager [req-e836f885-9da3-4028-a3f1-4daa9c355f9d req-0affb0f7-874d-451d-95ad-ad27c5b24d74 service nova] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Received event network-changed-af03ac28-a066-4ffd-ac52-33d4596db87d {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1628.346123] env[63379]: DEBUG nova.compute.manager [req-e836f885-9da3-4028-a3f1-4daa9c355f9d req-0affb0f7-874d-451d-95ad-ad27c5b24d74 service nova] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Refreshing instance network info cache due to event network-changed-af03ac28-a066-4ffd-ac52-33d4596db87d. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1628.346426] env[63379]: DEBUG oslo_concurrency.lockutils [req-e836f885-9da3-4028-a3f1-4daa9c355f9d req-0affb0f7-874d-451d-95ad-ad27c5b24d74 service nova] Acquiring lock "refresh_cache-acc8aa2f-41a8-4f06-8227-a1bae9c93f44" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1628.346627] env[63379]: DEBUG oslo_concurrency.lockutils [req-e836f885-9da3-4028-a3f1-4daa9c355f9d req-0affb0f7-874d-451d-95ad-ad27c5b24d74 service nova] Acquired lock "refresh_cache-acc8aa2f-41a8-4f06-8227-a1bae9c93f44" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1628.346869] env[63379]: DEBUG nova.network.neutron [req-e836f885-9da3-4028-a3f1-4daa9c355f9d req-0affb0f7-874d-451d-95ad-ad27c5b24d74 service nova] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Refreshing network info cache for port af03ac28-a066-4ffd-ac52-33d4596db87d {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1628.477670] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b0910b35-3064-4879-bf02-57970a3bc961 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "90f0c97d-695b-4975-8ab9-4e77a9175da1" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1628.478252] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b0910b35-3064-4879-bf02-57970a3bc961 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "90f0c97d-695b-4975-8ab9-4e77a9175da1" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1628.478977] env[63379]: INFO nova.compute.manager [None req-b0910b35-3064-4879-bf02-57970a3bc961 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Rebooting instance [ 1628.595570] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779614, 'name': CreateVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.769652] env[63379]: DEBUG oslo_concurrency.lockutils [None req-29923f7c-e8b5-411c-bd11-cf155895c40e tempest-ServersV294TestFqdnHostnames-1292283937 tempest-ServersV294TestFqdnHostnames-1292283937-project-member] Lock "158fe346-93f5-422b-877a-8423547da58f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.093s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1628.796402] env[63379]: DEBUG oslo_vmware.api [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779615, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068301} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.798552] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1628.801462] env[63379]: INFO nova.compute.manager [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Took 34.62 seconds to build instance. [ 1628.806788] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1a078c8-2ca9-4386-8efd-0b448b935429 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.833441] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Reconfiguring VM instance instance-0000003f to attach disk [datastore1] 5c4ae6c6-538a-4724-ad77-340d9c60c24a/5c4ae6c6-538a-4724-ad77-340d9c60c24a.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1628.840310] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4c3b4db0-87b8-464e-94ac-f5294ff93257 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.866477] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779618, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.867837] env[63379]: DEBUG oslo_vmware.api [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Waiting for the task: (returnval){ [ 1628.867837] env[63379]: value = "task-1779619" [ 1628.867837] env[63379]: _type = "Task" [ 1628.867837] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.881863] env[63379]: DEBUG oslo_vmware.api [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779619, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.003978] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b0910b35-3064-4879-bf02-57970a3bc961 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "refresh_cache-90f0c97d-695b-4975-8ab9-4e77a9175da1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1629.004215] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b0910b35-3064-4879-bf02-57970a3bc961 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquired lock "refresh_cache-90f0c97d-695b-4975-8ab9-4e77a9175da1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1629.004405] env[63379]: DEBUG nova.network.neutron [None req-b0910b35-3064-4879-bf02-57970a3bc961 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1629.098658] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779614, 'name': CreateVM_Task, 'duration_secs': 0.554015} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1629.098833] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1629.099558] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1629.101018] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1629.101018] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1629.101018] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23b92e4a-827a-4e59-afbc-5cad82292d56 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.106379] env[63379]: DEBUG oslo_vmware.api [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1629.106379] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a1ba07-5ca2-d9e0-5ac1-9adf2d55e16e" [ 1629.106379] 
env[63379]: _type = "Task" [ 1629.106379] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1629.114927] env[63379]: DEBUG oslo_vmware.api [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a1ba07-5ca2-d9e0-5ac1-9adf2d55e16e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.239985] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-097327a4-fd5f-4481-8c74-bb2238c0e200 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.248047] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-982878d9-6574-4074-9da6-e07e0af6ff6d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.279191] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f12a1788-9de9-4df5-9dee-451a1f815700 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.289857] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac9645f5-2c19-4fbf-a9bb-0ecfe27b4e63 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.305769] env[63379]: DEBUG nova.compute.provider_tree [None req-1fa4712c-5ce0-4a58-997b-6de8bc603b4e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1629.311550] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8c604717-3d86-4a7c-aac2-dd6a2dbf198c tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Lock "19a41941-0679-4971-8a44-c95b13f5c294" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.143s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1629.335101] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779618, 'name': CreateVM_Task, 'duration_secs': 0.744599} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1629.335354] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1629.335841] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1629.340725] env[63379]: DEBUG nova.network.neutron [req-e836f885-9da3-4028-a3f1-4daa9c355f9d req-0affb0f7-874d-451d-95ad-ad27c5b24d74 service nova] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Updated VIF entry in instance network info cache for port af03ac28-a066-4ffd-ac52-33d4596db87d. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1629.341339] env[63379]: DEBUG nova.network.neutron [req-e836f885-9da3-4028-a3f1-4daa9c355f9d req-0affb0f7-874d-451d-95ad-ad27c5b24d74 service nova] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Updating instance_info_cache with network_info: [{"id": "af03ac28-a066-4ffd-ac52-33d4596db87d", "address": "fa:16:3e:41:35:5c", "network": {"id": "37610d35-b1d3-4657-9542-9e3e955af5be", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-58653119-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6552f9956224ba5a0a01328da741242", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf03ac28-a0", "ovs_interfaceid": "af03ac28-a066-4ffd-ac52-33d4596db87d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1629.387753] env[63379]: DEBUG oslo_vmware.api [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779619, 'name': ReconfigVM_Task, 'duration_secs': 0.296277} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1629.390911] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Reconfigured VM instance instance-0000003f to attach disk [datastore1] 5c4ae6c6-538a-4724-ad77-340d9c60c24a/5c4ae6c6-538a-4724-ad77-340d9c60c24a.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1629.390911] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e181ddf1-aec6-4911-a2f7-11716634a2d2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.402131] env[63379]: DEBUG oslo_vmware.api [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Waiting for the task: (returnval){ [ 1629.402131] env[63379]: value = "task-1779620" [ 1629.402131] env[63379]: _type = "Task" [ 1629.402131] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1629.411085] env[63379]: DEBUG oslo_vmware.api [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779620, 'name': Rename_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.617511] env[63379]: DEBUG oslo_vmware.api [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a1ba07-5ca2-d9e0-5ac1-9adf2d55e16e, 'name': SearchDatastore_Task, 'duration_secs': 0.039732} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1629.617816] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1629.618086] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1629.618410] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1629.618481] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1629.618642] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1629.618936] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1629.619269] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1629.619496] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ba3411c2-d6d6-433c-8349-a67918edd166 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.621531] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ef3d5a4-079f-4cdd-88cb-c76860bd2c03 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.627349] env[63379]: DEBUG oslo_vmware.api [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 
tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Waiting for the task: (returnval){ [ 1629.627349] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52adfd0c-dbbb-ce47-5824-fd7046d62a13" [ 1629.627349] env[63379]: _type = "Task" [ 1629.627349] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1629.635861] env[63379]: DEBUG oslo_vmware.api [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52adfd0c-dbbb-ce47-5824-fd7046d62a13, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.646261] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1629.646541] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1629.647513] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f82d4d5-ee4c-4484-90d6-1620370b3e5f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.654254] env[63379]: DEBUG oslo_vmware.api [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1629.654254] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e1c977-7a54-471a-c2f3-937f14017e79" [ 1629.654254] env[63379]: _type = "Task" [ 1629.654254] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1629.665602] env[63379]: DEBUG oslo_vmware.api [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e1c977-7a54-471a-c2f3-937f14017e79, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.815384] env[63379]: DEBUG nova.scheduler.client.report [None req-1fa4712c-5ce0-4a58-997b-6de8bc603b4e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1629.843711] env[63379]: DEBUG oslo_concurrency.lockutils [req-e836f885-9da3-4028-a3f1-4daa9c355f9d req-0affb0f7-874d-451d-95ad-ad27c5b24d74 service nova] Releasing lock "refresh_cache-acc8aa2f-41a8-4f06-8227-a1bae9c93f44" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1629.917202] env[63379]: DEBUG oslo_vmware.api [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779620, 'name': Rename_Task, 'duration_secs': 0.152285} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1629.919489] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1629.919489] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fc8e0306-b433-4021-942d-322308106553 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.925619] env[63379]: DEBUG oslo_vmware.api [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Waiting for the task: (returnval){ [ 1629.925619] env[63379]: value = "task-1779621" [ 1629.925619] env[63379]: _type = "Task" [ 1629.925619] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1629.933695] env[63379]: DEBUG oslo_vmware.api [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779621, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.043518] env[63379]: DEBUG nova.network.neutron [None req-b0910b35-3064-4879-bf02-57970a3bc961 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Updating instance_info_cache with network_info: [{"id": "ef820562-0de4-462d-a51d-13e4a4929719", "address": "fa:16:3e:eb:5b:7f", "network": {"id": "c67e6fb1-ba3e-4494-b459-ecd555f3bf64", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1864563188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.212", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c01c5c8c3734c4ea066324e542e7374", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6934071-bf85-4591-9c7d-55c7ea131262", "external-id": "nsx-vlan-transportzone-452", "segmentation_id": 452, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef820562-0d", "ovs_interfaceid": "ef820562-0de4-462d-a51d-13e4a4929719", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1630.141280] env[63379]: DEBUG oslo_vmware.api [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52adfd0c-dbbb-ce47-5824-fd7046d62a13, 'name': SearchDatastore_Task, 'duration_secs': 0.028556} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1630.141630] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1630.142462] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1630.142462] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1630.167999] env[63379]: DEBUG oslo_vmware.api [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e1c977-7a54-471a-c2f3-937f14017e79, 'name': SearchDatastore_Task, 'duration_secs': 0.015451} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1630.168925] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d022c841-51ad-4911-b2ff-19737d221d0c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.176267] env[63379]: DEBUG oslo_vmware.api [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1630.176267] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]527558e9-34f3-4437-09d5-db6ad80c0864" [ 1630.176267] env[63379]: _type = "Task" [ 1630.176267] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1630.187389] env[63379]: DEBUG oslo_vmware.api [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]527558e9-34f3-4437-09d5-db6ad80c0864, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.325901] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1fa4712c-5ce0-4a58-997b-6de8bc603b4e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.131s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1630.328961] env[63379]: DEBUG oslo_concurrency.lockutils [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 24.730s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1630.328961] env[63379]: DEBUG nova.objects.instance [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63379) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1630.360389] env[63379]: INFO nova.scheduler.client.report [None req-1fa4712c-5ce0-4a58-997b-6de8bc603b4e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Deleted allocations for instance ee36cc5f-61a1-4e4f-9cae-670f5868d90c [ 1630.371242] env[63379]: DEBUG nova.compute.manager [req-1ce42a79-c1f6-45a4-94f1-8c569bbde9d7 req-4ade45f8-d35c-4fa6-a30f-4d2182fec485 service nova] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Received event network-changed-2d279162-72d1-4378-b83d-c80b2815f680 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1630.371404] env[63379]: DEBUG nova.compute.manager [req-1ce42a79-c1f6-45a4-94f1-8c569bbde9d7 req-4ade45f8-d35c-4fa6-a30f-4d2182fec485 service nova] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Refreshing instance network info cache due to event network-changed-2d279162-72d1-4378-b83d-c80b2815f680. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1630.371651] env[63379]: DEBUG oslo_concurrency.lockutils [req-1ce42a79-c1f6-45a4-94f1-8c569bbde9d7 req-4ade45f8-d35c-4fa6-a30f-4d2182fec485 service nova] Acquiring lock "refresh_cache-19a41941-0679-4971-8a44-c95b13f5c294" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1630.371771] env[63379]: DEBUG oslo_concurrency.lockutils [req-1ce42a79-c1f6-45a4-94f1-8c569bbde9d7 req-4ade45f8-d35c-4fa6-a30f-4d2182fec485 service nova] Acquired lock "refresh_cache-19a41941-0679-4971-8a44-c95b13f5c294" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1630.371999] env[63379]: DEBUG nova.network.neutron [req-1ce42a79-c1f6-45a4-94f1-8c569bbde9d7 req-4ade45f8-d35c-4fa6-a30f-4d2182fec485 service nova] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Refreshing network info cache for port 2d279162-72d1-4378-b83d-c80b2815f680 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1630.439340] env[63379]: DEBUG oslo_vmware.api [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779621, 'name': PowerOnVM_Task, 'duration_secs': 0.476397} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1630.439649] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1630.439856] env[63379]: INFO nova.compute.manager [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Took 7.51 seconds to spawn the instance on the hypervisor. 
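
[editor's note] The repeated 'Acquiring lock "compute_resources" ... / Lock "compute_resources" acquired ... waited Ns / Lock "compute_resources" "released" ... held Ns' entries come from oslo.concurrency's lockutils helpers, which time how long each caller waited for and then held the named lock. A minimal, hypothetical sketch of that usage follows; the prefix and function bodies are placeholders, not the resource tracker's real code.

    from oslo_concurrency import lockutils

    # Nova-style prefix; the lock name is what appears in the log messages.
    synchronized = lockutils.synchronized_with_prefix('nova-')


    @synchronized('compute_resources')
    def update_usage(context, instance):
        # Runs with the "compute_resources" lock held; the waited/held
        # durations in the DEBUG lines bracket this critical section.
        pass


    def explicit_lock_example():
        # The same lock can also be taken explicitly as a context manager.
        with lockutils.lock('compute_resources', lock_file_prefix='nova-'):
            pass

By default these are in-process semaphores; passing external=True switches to file-based locks that are shared across processes.
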
[ 1630.440056] env[63379]: DEBUG nova.compute.manager [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1630.441569] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9efeab9-2892-43be-8686-8eb4ff67ac65 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.546742] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b0910b35-3064-4879-bf02-57970a3bc961 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Releasing lock "refresh_cache-90f0c97d-695b-4975-8ab9-4e77a9175da1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1630.550426] env[63379]: DEBUG nova.compute.manager [None req-b0910b35-3064-4879-bf02-57970a3bc961 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1630.551440] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a39083a-82bf-4b90-9b1b-1669a230010a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.691849] env[63379]: DEBUG oslo_vmware.api [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]527558e9-34f3-4437-09d5-db6ad80c0864, 'name': SearchDatastore_Task, 'duration_secs': 0.019069} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1630.692423] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1630.693650] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] acc8aa2f-41a8-4f06-8227-a1bae9c93f44/acc8aa2f-41a8-4f06-8227-a1bae9c93f44.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1630.693650] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1630.693650] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1630.694014] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c8af4f8f-c079-4481-aac9-bafa416ab38c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.696742] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-39f82442-b9d3-4b9b-bd17-432524bc6c78 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.706318] env[63379]: DEBUG oslo_vmware.api [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1630.706318] env[63379]: value = "task-1779622" [ 1630.706318] env[63379]: _type = "Task" [ 1630.706318] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1630.708058] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1630.708304] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1630.713757] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9e14607-ae31-40f1-be60-0f5dec723f3b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.725311] env[63379]: DEBUG oslo_vmware.api [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779622, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.725311] env[63379]: DEBUG oslo_vmware.api [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Waiting for the task: (returnval){ [ 1630.725311] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c3cb80-6a7e-f6e0-d004-bc57ed2338c3" [ 1630.725311] env[63379]: _type = "Task" [ 1630.725311] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1630.740091] env[63379]: DEBUG oslo_vmware.api [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c3cb80-6a7e-f6e0-d004-bc57ed2338c3, 'name': SearchDatastore_Task, 'duration_secs': 0.009638} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1630.742703] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1be244e4-4fdd-4f78-81de-212da0def52a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.746868] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "c1858f41-75e7-4eee-a6db-493e150622ef" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1630.746868] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "c1858f41-75e7-4eee-a6db-493e150622ef" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1630.753726] env[63379]: DEBUG oslo_vmware.api [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Waiting for the task: (returnval){ [ 1630.753726] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52ce3ac0-bc25-76e2-8d64-7f5473570ae2" [ 1630.753726] env[63379]: _type = "Task" [ 1630.753726] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1630.763959] env[63379]: DEBUG oslo_vmware.api [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52ce3ac0-bc25-76e2-8d64-7f5473570ae2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.878574] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1fa4712c-5ce0-4a58-997b-6de8bc603b4e tempest-ListImageFiltersTestJSON-1778745142 tempest-ListImageFiltersTestJSON-1778745142-project-member] Lock "ee36cc5f-61a1-4e4f-9cae-670f5868d90c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.288s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1630.964256] env[63379]: INFO nova.compute.manager [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Took 36.78 seconds to build instance. [ 1631.222153] env[63379]: DEBUG oslo_vmware.api [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779622, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.225780] env[63379]: DEBUG nova.network.neutron [req-1ce42a79-c1f6-45a4-94f1-8c569bbde9d7 req-4ade45f8-d35c-4fa6-a30f-4d2182fec485 service nova] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Updated VIF entry in instance network info cache for port 2d279162-72d1-4378-b83d-c80b2815f680. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1631.226115] env[63379]: DEBUG nova.network.neutron [req-1ce42a79-c1f6-45a4-94f1-8c569bbde9d7 req-4ade45f8-d35c-4fa6-a30f-4d2182fec485 service nova] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Updating instance_info_cache with network_info: [{"id": "2d279162-72d1-4378-b83d-c80b2815f680", "address": "fa:16:3e:8c:45:f4", "network": {"id": "3a5c4f8e-5c7c-4623-90f8-f1b83e5b35f8", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-709139332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce15a519ec5744feb0731439b2534fc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d279162-72", "ovs_interfaceid": "2d279162-72d1-4378-b83d-c80b2815f680", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1631.249087] env[63379]: DEBUG nova.compute.manager [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1631.267712] env[63379]: DEBUG oslo_vmware.api [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52ce3ac0-bc25-76e2-8d64-7f5473570ae2, 'name': SearchDatastore_Task, 'duration_secs': 0.009926} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1631.267712] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1631.268095] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] b91a5b89-0456-431d-b099-adda3a6b3024/b91a5b89-0456-431d-b099-adda3a6b3024.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1631.268300] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8bebaa48-efa1-49f2-8620-9f032ef00db3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.275691] env[63379]: DEBUG oslo_vmware.api [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Waiting for the task: (returnval){ [ 1631.275691] env[63379]: value = "task-1779623" [ 1631.275691] env[63379]: _type = "Task" [ 1631.275691] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1631.284668] env[63379]: DEBUG oslo_vmware.api [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Task: {'id': task-1779623, 'name': CopyVirtualDisk_Task} progress is 0%. 
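The entries above show a CopyVirtualDisk_Task being started and then polled until it reports completion (repeated "progress is N%" lines, then "completed successfully" with a duration_secs). Below is a minimal, self-contained sketch of that poll-until-done loop; the TaskInfo shape and the get_task_info() callable are hypothetical stand-ins for illustration, not the oslo_vmware implementation behind the wait_for_task/_poll_task messages.

```python
# Sketch of a poll-until-done loop like the one producing the
# "progress is N%" / "completed successfully" DEBUG lines above.
# TaskInfo and get_task_info() are hypothetical stand-ins.
import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    state: str        # 'running', 'success', or 'error'
    progress: int     # percent complete
    error: str = ""

def wait_for_task(get_task_info, poll_interval=0.5, timeout=300):
    """Poll a task until it succeeds, fails, or times out."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info.state == "success":
            return info
        if info.state == "error":
            raise RuntimeError(f"task failed: {info.error}")
        print(f"task progress is {info.progress}%")  # mirrors the DEBUG lines
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete in time")
```

In the log the same loop surfaces as several progress lines for one task id followed by a single completion line carrying duration_secs.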
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.342054] env[63379]: DEBUG oslo_concurrency.lockutils [None req-53e2338d-6e56-4dde-b2c0-ed1ddb232f24 tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1631.346544] env[63379]: DEBUG oslo_concurrency.lockutils [None req-03b56432-c718-41b0-8304-95164de4e295 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.645s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1631.346544] env[63379]: DEBUG nova.objects.instance [None req-03b56432-c718-41b0-8304-95164de4e295 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lazy-loading 'resources' on Instance uuid 607f9774-0ffc-4ece-a7ba-419fdf6eb26b {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1631.470056] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0b7db3f6-ab6b-4804-a790-a3c378990d61 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Lock "5c4ae6c6-538a-4724-ad77-340d9c60c24a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.300s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1631.567665] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8310684c-506c-4106-afa5-cf78abf40d29 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.575836] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b0910b35-3064-4879-bf02-57970a3bc961 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Doing hard reboot of VM {{(pid=63379) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1631.576136] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-07d52194-36e0-40de-bb8f-8e5e992b598f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.582949] env[63379]: DEBUG oslo_vmware.api [None req-b0910b35-3064-4879-bf02-57970a3bc961 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1631.582949] env[63379]: value = "task-1779624" [ 1631.582949] env[63379]: _type = "Task" [ 1631.582949] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1631.590810] env[63379]: DEBUG oslo_vmware.api [None req-b0910b35-3064-4879-bf02-57970a3bc961 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1779624, 'name': ResetVM_Task} progress is 0%. 
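The "Acquiring lock ... acquired ... waited Ns ... held Ns ... released" entries above come from oslo.concurrency's named-lock wrappers. A small sketch of the two standard usage forms follows; the lock name "compute_resources" is taken from the log purely for illustration, and the bodies are placeholders rather than the real resource-tracker code.

```python
# Sketch of the oslo.concurrency named-lock pattern behind the
# "Acquiring lock ... acquired ... released" DEBUG lines above.
# The protected work inside each function is a placeholder.
from oslo_concurrency import lockutils

def update_usage(tracker, instance):
    # Context-manager form: blocks until the named lock is free,
    # logging how long it waited and how long it was held.
    with lockutils.lock("compute_resources"):
        tracker.update(instance)   # placeholder for the protected work

@lockutils.synchronized("compute_resources")
def instance_claim(tracker, instance):
    # Decorator form: same semantics, applied to a whole function.
    return tracker.claim(instance)   # placeholder
```

The waited/held timings in the log (e.g. "waited 24.645s", "held 1.013s") are exactly what these wrappers report when contention occurs.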
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.728399] env[63379]: DEBUG oslo_vmware.api [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779622, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.556629} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1631.728838] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] acc8aa2f-41a8-4f06-8227-a1bae9c93f44/acc8aa2f-41a8-4f06-8227-a1bae9c93f44.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1631.729169] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1631.729520] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-83b7bc37-6ebd-44f8-ba21-3aa00a178613 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.738895] env[63379]: DEBUG oslo_concurrency.lockutils [req-1ce42a79-c1f6-45a4-94f1-8c569bbde9d7 req-4ade45f8-d35c-4fa6-a30f-4d2182fec485 service nova] Releasing lock "refresh_cache-19a41941-0679-4971-8a44-c95b13f5c294" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1631.744945] env[63379]: DEBUG oslo_vmware.api [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1631.744945] env[63379]: value = "task-1779625" [ 1631.744945] env[63379]: _type = "Task" [ 1631.744945] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1631.757129] env[63379]: DEBUG oslo_vmware.api [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779625, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.775545] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1631.786208] env[63379]: DEBUG oslo_vmware.api [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Task: {'id': task-1779623, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.097105] env[63379]: DEBUG oslo_vmware.api [None req-b0910b35-3064-4879-bf02-57970a3bc961 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1779624, 'name': ResetVM_Task, 'duration_secs': 0.117169} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1632.097403] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b0910b35-3064-4879-bf02-57970a3bc961 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Did hard reboot of VM {{(pid=63379) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1632.097595] env[63379]: DEBUG nova.compute.manager [None req-b0910b35-3064-4879-bf02-57970a3bc961 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1632.098563] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7f02fb0-8e5e-445a-9223-36b9e40e4515 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.259198] env[63379]: DEBUG oslo_vmware.api [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779625, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.183437} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1632.261909] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1632.263056] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5598a61d-fa63-4a27-b13b-89eec9457af9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.287832] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] acc8aa2f-41a8-4f06-8227-a1bae9c93f44/acc8aa2f-41a8-4f06-8227-a1bae9c93f44.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1632.287832] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-02470fd5-9b30-4c3a-a743-9d104c015172 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.305900] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-101cfddb-66b3-4858-bbf5-55b023eb8d48 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1632.321985] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09d68bd0-07d9-46d4-a359-c810b4fd7594 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.325339] env[63379]: DEBUG oslo_vmware.api [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Task: {'id': task-1779623, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.03803} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1632.325612] env[63379]: DEBUG oslo_vmware.api [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1632.325612] env[63379]: value = "task-1779626" [ 1632.325612] env[63379]: _type = "Task" [ 1632.325612] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1632.325880] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] b91a5b89-0456-431d-b099-adda3a6b3024/b91a5b89-0456-431d-b099-adda3a6b3024.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1632.326976] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1632.326976] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-843c179b-9a15-4cef-8360-5ef24d42a709 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.356144] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4ab1b5f-0da7-4e8d-91d8-1b9921721c27 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.362638] env[63379]: DEBUG oslo_vmware.api [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779626, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.362954] env[63379]: DEBUG oslo_vmware.api [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Waiting for the task: (returnval){ [ 1632.362954] env[63379]: value = "task-1779627" [ 1632.362954] env[63379]: _type = "Task" [ 1632.362954] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1632.369989] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e6e81f1-bbe7-4872-b827-7dabeafbabc5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.376754] env[63379]: INFO nova.compute.manager [None req-fc2998f3-7b60-4305-a056-b6213632209f tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Rescuing [ 1632.377011] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fc2998f3-7b60-4305-a056-b6213632209f tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Acquiring lock "refresh_cache-5c4ae6c6-538a-4724-ad77-340d9c60c24a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1632.377176] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fc2998f3-7b60-4305-a056-b6213632209f tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Acquired lock "refresh_cache-5c4ae6c6-538a-4724-ad77-340d9c60c24a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1632.377348] env[63379]: DEBUG nova.network.neutron [None req-fc2998f3-7b60-4305-a056-b6213632209f tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1632.378558] env[63379]: DEBUG oslo_vmware.api [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Task: {'id': task-1779627, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.389430] env[63379]: DEBUG nova.compute.provider_tree [None req-03b56432-c718-41b0-8304-95164de4e295 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1632.616682] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b0910b35-3064-4879-bf02-57970a3bc961 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "90f0c97d-695b-4975-8ab9-4e77a9175da1" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.139s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1632.837071] env[63379]: DEBUG oslo_vmware.api [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779626, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.882090] env[63379]: DEBUG oslo_vmware.api [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Task: {'id': task-1779627, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.279461} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1632.882420] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1632.883950] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df421787-296d-48a0-a81a-df1c1a61a806 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.896295] env[63379]: DEBUG nova.scheduler.client.report [None req-03b56432-c718-41b0-8304-95164de4e295 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1632.918128] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] b91a5b89-0456-431d-b099-adda3a6b3024/b91a5b89-0456-431d-b099-adda3a6b3024.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1632.919345] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2937700d-bd84-43e4-b976-d4005838def2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.942686] env[63379]: DEBUG oslo_vmware.api [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Waiting for the task: (returnval){ [ 1632.942686] env[63379]: value = "task-1779628" [ 1632.942686] env[63379]: _type = "Task" [ 1632.942686] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1632.952398] env[63379]: DEBUG oslo_vmware.api [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Task: {'id': task-1779628, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.314672] env[63379]: DEBUG nova.network.neutron [None req-fc2998f3-7b60-4305-a056-b6213632209f tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Updating instance_info_cache with network_info: [{"id": "9e2aaa43-4ac9-490a-a951-3521757945cd", "address": "fa:16:3e:2b:4b:b8", "network": {"id": "832e4609-8371-4d4b-8cfc-8a38039d24b7", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1517956996-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "c3562bb229474ba7aa3dae98def05260", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f267bcdd-0daa-4337-9709-5fc060c267d8", "external-id": "nsx-vlan-transportzone-308", "segmentation_id": 308, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e2aaa43-4a", "ovs_interfaceid": "9e2aaa43-4ac9-490a-a951-3521757945cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1633.339114] env[63379]: DEBUG oslo_vmware.api [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779626, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.420200] env[63379]: DEBUG oslo_concurrency.lockutils [None req-03b56432-c718-41b0-8304-95164de4e295 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.077s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1633.424647] env[63379]: DEBUG oslo_concurrency.lockutils [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.028s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1633.429375] env[63379]: INFO nova.compute.claims [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1633.459449] env[63379]: DEBUG oslo_vmware.api [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Task: {'id': task-1779628, 'name': ReconfigVM_Task} progress is 99%. 
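The instance_info_cache entries above carry a nested network_info structure (VIF → network → subnets → ips → floating_ips). As a small illustration of that shape, the helper below walks such a structure and collects the fixed and floating addresses; it is plain dict-walking over the field names visible in the logged JSON, nothing more.

```python
# Sketch: extracting addresses from a network_info structure shaped like
# the instance_info_cache entries logged above. Field names follow the
# logged JSON; no other structure is assumed.

def addresses_from_network_info(network_info):
    """Return (fixed_ips, floating_ips) found in a network_info list."""
    fixed, floating = [], []
    for vif in network_info:
        for subnet in vif.get("network", {}).get("subnets", []):
            for ip in subnet.get("ips", []):
                fixed.append(ip["address"])
                for fip in ip.get("floating_ips", []):
                    floating.append(fip["address"])
    return fixed, floating

# For the VIF logged for instance 19a41941-0679-4971-8a44-c95b13f5c294:
#   fixed    -> ["192.168.128.9"]
#   floating -> ["10.180.180.232"]
```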
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.461960] env[63379]: INFO nova.scheduler.client.report [None req-03b56432-c718-41b0-8304-95164de4e295 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Deleted allocations for instance 607f9774-0ffc-4ece-a7ba-419fdf6eb26b [ 1633.818131] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fc2998f3-7b60-4305-a056-b6213632209f tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Releasing lock "refresh_cache-5c4ae6c6-538a-4724-ad77-340d9c60c24a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1633.837804] env[63379]: DEBUG oslo_vmware.api [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779626, 'name': ReconfigVM_Task, 'duration_secs': 1.029838} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1633.840348] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Reconfigured VM instance instance-00000040 to attach disk [datastore1] acc8aa2f-41a8-4f06-8227-a1bae9c93f44/acc8aa2f-41a8-4f06-8227-a1bae9c93f44.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1633.841015] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c437450c-387d-4685-8129-db556ed7c1d6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.853886] env[63379]: DEBUG oslo_vmware.api [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1633.853886] env[63379]: value = "task-1779629" [ 1633.853886] env[63379]: _type = "Task" [ 1633.853886] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1633.860798] env[63379]: DEBUG oslo_vmware.api [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779629, 'name': Rename_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.958422] env[63379]: DEBUG oslo_vmware.api [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Task: {'id': task-1779628, 'name': ReconfigVM_Task, 'duration_secs': 0.549644} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1633.958543] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Reconfigured VM instance instance-00000041 to attach disk [datastore1] b91a5b89-0456-431d-b099-adda3a6b3024/b91a5b89-0456-431d-b099-adda3a6b3024.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1633.959360] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a667b04c-d3a2-4603-8907-abf31dc4f35f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.969780] env[63379]: DEBUG oslo_vmware.api [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Waiting for the task: (returnval){ [ 1633.969780] env[63379]: value = "task-1779630" [ 1633.969780] env[63379]: _type = "Task" [ 1633.969780] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1633.970254] env[63379]: DEBUG oslo_concurrency.lockutils [None req-03b56432-c718-41b0-8304-95164de4e295 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "607f9774-0ffc-4ece-a7ba-419fdf6eb26b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.446s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1633.980261] env[63379]: DEBUG oslo_vmware.api [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Task: {'id': task-1779630, 'name': Rename_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1634.357587] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc2998f3-7b60-4305-a056-b6213632209f tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1634.363143] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5fdb9f56-9f1f-4c3e-ace8-70019286ddb0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.373088] env[63379]: DEBUG oslo_vmware.api [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779629, 'name': Rename_Task, 'duration_secs': 0.21158} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1634.373088] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1634.373088] env[63379]: DEBUG oslo_vmware.api [None req-fc2998f3-7b60-4305-a056-b6213632209f tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Waiting for the task: (returnval){ [ 1634.373088] env[63379]: value = "task-1779631" [ 1634.373088] env[63379]: _type = "Task" [ 1634.373088] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1634.373088] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7ff9cb31-c0a4-44a3-b10c-bc1a1e7894e3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.387200] env[63379]: DEBUG oslo_vmware.api [None req-fc2998f3-7b60-4305-a056-b6213632209f tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779631, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1634.389519] env[63379]: DEBUG oslo_vmware.api [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1634.389519] env[63379]: value = "task-1779632" [ 1634.389519] env[63379]: _type = "Task" [ 1634.389519] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1634.398281] env[63379]: DEBUG oslo_vmware.api [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779632, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1634.485486] env[63379]: DEBUG oslo_vmware.api [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Task: {'id': task-1779630, 'name': Rename_Task, 'duration_secs': 0.287412} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1634.485822] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1634.486650] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-35e244d5-e56b-4e21-8661-64a583e3b225 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.496049] env[63379]: DEBUG oslo_vmware.api [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Waiting for the task: (returnval){ [ 1634.496049] env[63379]: value = "task-1779633" [ 1634.496049] env[63379]: _type = "Task" [ 1634.496049] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1634.503889] env[63379]: DEBUG oslo_vmware.api [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Task: {'id': task-1779633, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1634.890451] env[63379]: DEBUG oslo_vmware.api [None req-fc2998f3-7b60-4305-a056-b6213632209f tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779631, 'name': PowerOffVM_Task, 'duration_secs': 0.251855} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1634.894981] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc2998f3-7b60-4305-a056-b6213632209f tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1634.896797] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f19384e9-4ce9-4d9a-9ddd-86c913763a8a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.900879] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb3d3374-e15b-4a5e-abf3-f23aa968235d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.908587] env[63379]: DEBUG oslo_vmware.api [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779632, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1634.931028] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0db29d4-3aa8-42ef-8f73-b7d4053f590c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.934768] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4f3ff7c-a2af-454d-8cfc-149acccb2e60 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.971372] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39acd0bd-8d49-49b3-aa38-19b797da194c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.983528] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d81084b-f360-4738-8c36-45980662d3c9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.000367] env[63379]: DEBUG nova.compute.provider_tree [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1635.003839] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc2998f3-7b60-4305-a056-b6213632209f tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1635.008423] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e499895a-6e6e-48ff-8df2-d8da21fe3853 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.016013] env[63379]: DEBUG oslo_vmware.api [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Task: {'id': task-1779633, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.016013] env[63379]: DEBUG oslo_vmware.api [None req-fc2998f3-7b60-4305-a056-b6213632209f tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Waiting for the task: (returnval){ [ 1635.016013] env[63379]: value = "task-1779634" [ 1635.016013] env[63379]: _type = "Task" [ 1635.016013] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1635.026663] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc2998f3-7b60-4305-a056-b6213632209f tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] VM already powered off {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1635.026663] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-fc2998f3-7b60-4305-a056-b6213632209f tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1635.026663] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fc2998f3-7b60-4305-a056-b6213632209f tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1635.026663] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fc2998f3-7b60-4305-a056-b6213632209f tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1635.026663] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc2998f3-7b60-4305-a056-b6213632209f tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1635.026663] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-77d3d839-2b50-4eaa-8f07-a08310f2064c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.034965] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc2998f3-7b60-4305-a056-b6213632209f tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1635.034965] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-fc2998f3-7b60-4305-a056-b6213632209f tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Folder [datastore1] devstack-image-cache_base created. 
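The sequence above (acquire a lock named after the cached vmdk, create devstack-image-cache_base if missing, search the datastore, then copy from the cache) is a fetch-if-missing cache population guarded by a per-item lock. The sketch below shows the same pattern as a local-filesystem analogy only, assuming a hypothetical fetch() callable; it is not the datastore/vmdk code itself.

```python
# Local-filesystem analogy of the image-cache pattern in the entries above:
# lock the cached item, ensure the cache directory exists, populate the item
# only if it is absent. fetch() is a hypothetical downloader/copier.
import os
from oslo_concurrency import lockutils

def ensure_cached(cache_dir, image_id, fetch):
    """Return the path of the cached image, fetching it under a lock if missing."""
    path = os.path.join(cache_dir, image_id)
    with lockutils.lock(path):                   # one writer per cached image
        os.makedirs(cache_dir, exist_ok=True)    # "Creating directory with path ..."
        if not os.path.exists(path):             # analogue of the SearchDatastore check
            fetch(path)                          # copy/download into the cache
    return path
```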
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1635.035491] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ecea06f3-4bdb-4058-99de-8cb520cbff0c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.043022] env[63379]: DEBUG oslo_vmware.api [None req-fc2998f3-7b60-4305-a056-b6213632209f tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Waiting for the task: (returnval){ [ 1635.043022] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52eb067e-be8d-7d7d-dd3e-834765d4eab3" [ 1635.043022] env[63379]: _type = "Task" [ 1635.043022] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1635.048122] env[63379]: DEBUG oslo_vmware.api [None req-fc2998f3-7b60-4305-a056-b6213632209f tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52eb067e-be8d-7d7d-dd3e-834765d4eab3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.401489] env[63379]: DEBUG oslo_vmware.api [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779632, 'name': PowerOnVM_Task, 'duration_secs': 0.70565} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1635.401693] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1635.403106] env[63379]: INFO nova.compute.manager [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Took 9.86 seconds to spawn the instance on the hypervisor. 
[ 1635.403106] env[63379]: DEBUG nova.compute.manager [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1635.403106] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb0a0d53-3c8f-46fb-9c70-5104b160b785 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.511395] env[63379]: DEBUG nova.scheduler.client.report [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1635.514844] env[63379]: DEBUG oslo_vmware.api [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Task: {'id': task-1779633, 'name': PowerOnVM_Task, 'duration_secs': 0.622548} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1635.515533] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1635.515774] env[63379]: INFO nova.compute.manager [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Took 7.35 seconds to spawn the instance on the hypervisor. [ 1635.515974] env[63379]: DEBUG nova.compute.manager [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1635.517079] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-070e93e2-1d9c-472b-9cc1-7e453ae1bae7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.553138] env[63379]: DEBUG oslo_vmware.api [None req-fc2998f3-7b60-4305-a056-b6213632209f tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52eb067e-be8d-7d7d-dd3e-834765d4eab3, 'name': SearchDatastore_Task, 'duration_secs': 0.015012} completed successfully. 
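The inventory record logged above for provider cf478c89-515f-4372-b90f-4868ab56e978 implies the capacity the scheduler can place against. As a worked example under the usual Placement convention capacity = (total - reserved) * allocation_ratio (stated here as a sketch, not quoted from the Placement source), the figures from the log work out as follows.

```python
# Worked example: effective capacity implied by the logged inventory data,
# assuming capacity = (total - reserved) * allocation_ratio.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: {capacity:g}")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
```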
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1635.554111] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5cd88eaa-fe71-4683-aea0-8f3db17edf44 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.560692] env[63379]: DEBUG oslo_vmware.api [None req-fc2998f3-7b60-4305-a056-b6213632209f tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Waiting for the task: (returnval){ [ 1635.560692] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52586c5e-ef92-cc5f-799b-cef8250f87ae" [ 1635.560692] env[63379]: _type = "Task" [ 1635.560692] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1635.569777] env[63379]: DEBUG oslo_vmware.api [None req-fc2998f3-7b60-4305-a056-b6213632209f tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52586c5e-ef92-cc5f-799b-cef8250f87ae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.620832] env[63379]: DEBUG oslo_concurrency.lockutils [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1635.621047] env[63379]: DEBUG oslo_concurrency.lockutils [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1635.732054] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Acquiring lock "266cc3d5-c10d-4367-a879-d170802495db" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1635.732321] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Lock "266cc3d5-c10d-4367-a879-d170802495db" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1635.844138] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6064167c-5187-4906-b096-77487e11ecf0 tempest-ServersAdminTestJSON-1417567887 tempest-ServersAdminTestJSON-1417567887-project-admin] Acquiring lock "refresh_cache-acc8aa2f-41a8-4f06-8227-a1bae9c93f44" {{(pid=63379) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1635.844138] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6064167c-5187-4906-b096-77487e11ecf0 tempest-ServersAdminTestJSON-1417567887 tempest-ServersAdminTestJSON-1417567887-project-admin] Acquired lock "refresh_cache-acc8aa2f-41a8-4f06-8227-a1bae9c93f44" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1635.844238] env[63379]: DEBUG nova.network.neutron [None req-6064167c-5187-4906-b096-77487e11ecf0 tempest-ServersAdminTestJSON-1417567887 tempest-ServersAdminTestJSON-1417567887-project-admin] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1635.925376] env[63379]: INFO nova.compute.manager [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Took 39.63 seconds to build instance. [ 1636.016523] env[63379]: DEBUG oslo_concurrency.lockutils [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.592s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1636.017113] env[63379]: DEBUG nova.compute.manager [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1636.020447] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fd53a111-d3e7-4980-8551-aefbe6955814 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.385s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1636.020698] env[63379]: DEBUG nova.objects.instance [None req-fd53a111-d3e7-4980-8551-aefbe6955814 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Lazy-loading 'resources' on Instance uuid 2a996f06-542e-4f71-95a4-0f71097d1478 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1636.037715] env[63379]: INFO nova.compute.manager [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Took 37.81 seconds to build instance. [ 1636.071361] env[63379]: DEBUG oslo_vmware.api [None req-fc2998f3-7b60-4305-a056-b6213632209f tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52586c5e-ef92-cc5f-799b-cef8250f87ae, 'name': SearchDatastore_Task, 'duration_secs': 0.035226} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1636.071645] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fc2998f3-7b60-4305-a056-b6213632209f tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1636.071910] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc2998f3-7b60-4305-a056-b6213632209f tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 5c4ae6c6-538a-4724-ad77-340d9c60c24a/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48-rescue.vmdk. {{(pid=63379) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1636.072300] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9280f1d3-c240-4b34-ba5d-678e1e647997 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.080739] env[63379]: DEBUG oslo_vmware.api [None req-fc2998f3-7b60-4305-a056-b6213632209f tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Waiting for the task: (returnval){ [ 1636.080739] env[63379]: value = "task-1779635" [ 1636.080739] env[63379]: _type = "Task" [ 1636.080739] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1636.088527] env[63379]: DEBUG oslo_vmware.api [None req-fc2998f3-7b60-4305-a056-b6213632209f tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779635, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.123420] env[63379]: DEBUG nova.compute.manager [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1636.236978] env[63379]: DEBUG nova.compute.manager [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] [instance: 266cc3d5-c10d-4367-a879-d170802495db] Starting instance... 
{{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1636.429564] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0497b5a4-0faf-47b8-b0e7-fbaec7224257 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Lock "acc8aa2f-41a8-4f06-8227-a1bae9c93f44" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.160s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1636.525681] env[63379]: DEBUG nova.compute.utils [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1636.531087] env[63379]: DEBUG nova.compute.manager [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1636.531657] env[63379]: DEBUG nova.network.neutron [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1636.545605] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2abaad4e-6454-417e-a87f-9d924bba06f9 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Lock "b91a5b89-0456-431d-b099-adda3a6b3024" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.331s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1636.597641] env[63379]: DEBUG oslo_vmware.api [None req-fc2998f3-7b60-4305-a056-b6213632209f tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779635, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.623213] env[63379]: DEBUG nova.policy [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eff0cdeebe5d4a538e478145a71130fc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '075142842d284d6f8fdec7592239a03c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1636.649296] env[63379]: DEBUG nova.network.neutron [None req-6064167c-5187-4906-b096-77487e11ecf0 tempest-ServersAdminTestJSON-1417567887 tempest-ServersAdminTestJSON-1417567887-project-admin] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Updating instance_info_cache with network_info: [{"id": "af03ac28-a066-4ffd-ac52-33d4596db87d", "address": "fa:16:3e:41:35:5c", "network": {"id": "37610d35-b1d3-4657-9542-9e3e955af5be", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-58653119-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6552f9956224ba5a0a01328da741242", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf03ac28-a0", "ovs_interfaceid": "af03ac28-a066-4ffd-ac52-33d4596db87d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1636.654344] env[63379]: DEBUG oslo_concurrency.lockutils [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1636.766947] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1637.013198] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f08d6b85-4149-415f-91ea-7f08b36c1243 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.021250] env[63379]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51ca3e88-778d-49e5-ab72-638c8f81f64a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.050742] env[63379]: DEBUG nova.compute.manager [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1637.057018] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c63b5460-672b-447b-a08f-9e36ad4814cd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.062215] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad088f2f-77f8-4efa-bb6e-3c6f18cd5f77 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.077557] env[63379]: DEBUG nova.compute.provider_tree [None req-fd53a111-d3e7-4980-8551-aefbe6955814 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1637.090064] env[63379]: DEBUG oslo_vmware.api [None req-fc2998f3-7b60-4305-a056-b6213632209f tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779635, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.720542} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1637.090458] env[63379]: INFO nova.virt.vmwareapi.ds_util [None req-fc2998f3-7b60-4305-a056-b6213632209f tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 5c4ae6c6-538a-4724-ad77-340d9c60c24a/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48-rescue.vmdk. 
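
The SearchDatastore_Task and CopyVirtualDisk_Task records above follow oslo.vmware's standard invoke-then-poll pattern ("Invoking ..." from service.py, "Waiting for the task ..." and the progress lines from api.py). A minimal sketch of that pattern is below; it is illustrative only, not taken from this deployment — the host, credentials, and datastore paths are placeholders, and the exact constructor arguments may differ between oslo.vmware releases.

```python
# Sketch of the oslo.vmware task pattern seen in the log records above.
# All names and paths are placeholders; verify the constructor signature
# against the oslo.vmware version actually installed.
from oslo_vmware import api as vmware_api

session = vmware_api.VMwareAPISession(
    'vcenter.example.org',          # placeholder vCenter host
    'administrator@vsphere.local',  # placeholder username
    'secret',                       # placeholder password
    api_retry_count=10,
    task_poll_interval=0.5,
)

# The virtual disk manager managed object comes from the service content.
disk_mgr = session.vim.service_content.virtualDiskManager

# invoke_api() issues the SOAP call (the "Invoking
# VirtualDiskManager.CopyVirtualDisk_Task" record) and returns a task
# managed-object reference.
task = session.invoke_api(
    session.vim, 'CopyVirtualDisk_Task', disk_mgr,
    sourceName='[datastore1] cache/base.vmdk',     # placeholder source
    destName='[datastore1] instance/rescue.vmdk',  # placeholder destination
    force=False,
)

# wait_for_task() polls the task (the "progress is 0% ... 25% ...
# completed successfully" records) and raises if the task ends in error.
session.wait_for_task(task)
```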
[ 1637.091243] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5222418-d4ae-440e-b638-0a23c1afa3f8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.120074] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc2998f3-7b60-4305-a056-b6213632209f tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Reconfiguring VM instance instance-0000003f to attach disk [datastore1] 5c4ae6c6-538a-4724-ad77-340d9c60c24a/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48-rescue.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1637.120074] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-33bcb66a-077a-47c0-b951-d1f6dd9fde32 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.138306] env[63379]: DEBUG oslo_vmware.api [None req-fc2998f3-7b60-4305-a056-b6213632209f tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Waiting for the task: (returnval){ [ 1637.138306] env[63379]: value = "task-1779636" [ 1637.138306] env[63379]: _type = "Task" [ 1637.138306] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1637.147290] env[63379]: DEBUG oslo_vmware.api [None req-fc2998f3-7b60-4305-a056-b6213632209f tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779636, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.156382] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6064167c-5187-4906-b096-77487e11ecf0 tempest-ServersAdminTestJSON-1417567887 tempest-ServersAdminTestJSON-1417567887-project-admin] Releasing lock "refresh_cache-acc8aa2f-41a8-4f06-8227-a1bae9c93f44" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1637.156626] env[63379]: DEBUG nova.compute.manager [None req-6064167c-5187-4906-b096-77487e11ecf0 tempest-ServersAdminTestJSON-1417567887 tempest-ServersAdminTestJSON-1417567887-project-admin] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Inject network info {{(pid=63379) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7265}} [ 1637.156941] env[63379]: DEBUG nova.compute.manager [None req-6064167c-5187-4906-b096-77487e11ecf0 tempest-ServersAdminTestJSON-1417567887 tempest-ServersAdminTestJSON-1417567887-project-admin] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] network_info to inject: |[{"id": "af03ac28-a066-4ffd-ac52-33d4596db87d", "address": "fa:16:3e:41:35:5c", "network": {"id": "37610d35-b1d3-4657-9542-9e3e955af5be", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-58653119-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6552f9956224ba5a0a01328da741242", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf03ac28-a0", "ovs_interfaceid": "af03ac28-a066-4ffd-ac52-33d4596db87d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7266}} [ 1637.161811] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6064167c-5187-4906-b096-77487e11ecf0 tempest-ServersAdminTestJSON-1417567887 tempest-ServersAdminTestJSON-1417567887-project-admin] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Reconfiguring VM instance to set the machine id {{(pid=63379) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1637.162279] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a0e75281-9565-465f-a485-3eda446c694b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.175669] env[63379]: INFO nova.compute.manager [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Rebuilding instance [ 1637.180733] env[63379]: DEBUG oslo_vmware.api [None req-6064167c-5187-4906-b096-77487e11ecf0 tempest-ServersAdminTestJSON-1417567887 tempest-ServersAdminTestJSON-1417567887-project-admin] Waiting for the task: (returnval){ [ 1637.180733] env[63379]: value = "task-1779637" [ 1637.180733] 
env[63379]: _type = "Task" [ 1637.180733] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1637.191079] env[63379]: DEBUG oslo_vmware.api [None req-6064167c-5187-4906-b096-77487e11ecf0 tempest-ServersAdminTestJSON-1417567887 tempest-ServersAdminTestJSON-1417567887-project-admin] Task: {'id': task-1779637, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.211688] env[63379]: DEBUG nova.network.neutron [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Successfully created port: 333b52cb-3eba-421d-a26f-dc741d462410 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1637.227892] env[63379]: DEBUG nova.compute.manager [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1637.228963] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f334b7a-263c-4f89-a691-4547c705b8ec {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.580968] env[63379]: DEBUG nova.scheduler.client.report [None req-fd53a111-d3e7-4980-8551-aefbe6955814 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1637.648880] env[63379]: DEBUG oslo_vmware.api [None req-fc2998f3-7b60-4305-a056-b6213632209f tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779636, 'name': ReconfigVM_Task, 'duration_secs': 0.344028} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1637.649238] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc2998f3-7b60-4305-a056-b6213632209f tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Reconfigured VM instance instance-0000003f to attach disk [datastore1] 5c4ae6c6-538a-4724-ad77-340d9c60c24a/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48-rescue.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1637.650130] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb210379-e72a-4090-9cda-284c4f1ebd11 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.677047] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0444fcb0-bec6-4e2c-a9ba-dce0f9b64132 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.699838] env[63379]: DEBUG oslo_vmware.api [None req-6064167c-5187-4906-b096-77487e11ecf0 tempest-ServersAdminTestJSON-1417567887 tempest-ServersAdminTestJSON-1417567887-project-admin] Task: {'id': task-1779637, 'name': ReconfigVM_Task, 'duration_secs': 0.184189} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1637.701426] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6064167c-5187-4906-b096-77487e11ecf0 tempest-ServersAdminTestJSON-1417567887 tempest-ServersAdminTestJSON-1417567887-project-admin] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Reconfigured VM instance to set the machine id {{(pid=63379) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1637.701874] env[63379]: DEBUG oslo_vmware.api [None req-fc2998f3-7b60-4305-a056-b6213632209f tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Waiting for the task: (returnval){ [ 1637.701874] env[63379]: value = "task-1779638" [ 1637.701874] env[63379]: _type = "Task" [ 1637.701874] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1637.709871] env[63379]: DEBUG oslo_vmware.api [None req-fc2998f3-7b60-4305-a056-b6213632209f tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779638, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.742367] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1637.743408] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7a985b35-f743-40ef-9a96-254656fba6ce {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.749605] env[63379]: DEBUG oslo_vmware.api [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Waiting for the task: (returnval){ [ 1637.749605] env[63379]: value = "task-1779639" [ 1637.749605] env[63379]: _type = "Task" [ 1637.749605] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1637.758989] env[63379]: DEBUG oslo_vmware.api [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Task: {'id': task-1779639, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.071060] env[63379]: DEBUG nova.compute.manager [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1638.091713] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fd53a111-d3e7-4980-8551-aefbe6955814 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.071s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1638.096352] env[63379]: DEBUG nova.virt.hardware [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1638.096598] env[63379]: DEBUG nova.virt.hardware [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1638.096753] env[63379]: DEBUG nova.virt.hardware [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1638.096938] env[63379]: DEBUG nova.virt.hardware [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1638.097202] env[63379]: DEBUG nova.virt.hardware [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1638.097301] env[63379]: DEBUG nova.virt.hardware [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1638.097474] env[63379]: DEBUG nova.virt.hardware [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1638.097638] env[63379]: DEBUG nova.virt.hardware [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1638.097809] env[63379]: DEBUG nova.virt.hardware [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1638.097978] env[63379]: DEBUG nova.virt.hardware [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1638.098347] env[63379]: DEBUG nova.virt.hardware [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1638.098799] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c11d0bc1-2313-40aa-bfc2-c315ed7e2fcc tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.685s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1638.099055] env[63379]: DEBUG nova.objects.instance [None req-c11d0bc1-2313-40aa-bfc2-c315ed7e2fcc tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Lazy-loading 'resources' on Instance uuid ac596f08-86a3-42e0-86e6-41a173fe868f {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1638.100793] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8089e209-7618-4ef2-909c-f9ef70664aba {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.110194] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dea1757-569f-418c-ad63-90955f63ef26 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.126284] env[63379]: INFO nova.scheduler.client.report [None req-fd53a111-d3e7-4980-8551-aefbe6955814 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Deleted allocations for instance 2a996f06-542e-4f71-95a4-0f71097d1478 [ 1638.212966] env[63379]: DEBUG oslo_vmware.api [None req-fc2998f3-7b60-4305-a056-b6213632209f tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779638, 'name': ReconfigVM_Task, 'duration_secs': 0.164815} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1638.213295] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc2998f3-7b60-4305-a056-b6213632209f tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1638.213561] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9f132f41-7cc8-436a-9eed-58e067d3813e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.220053] env[63379]: DEBUG oslo_vmware.api [None req-fc2998f3-7b60-4305-a056-b6213632209f tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Waiting for the task: (returnval){ [ 1638.220053] env[63379]: value = "task-1779640" [ 1638.220053] env[63379]: _type = "Task" [ 1638.220053] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1638.227702] env[63379]: DEBUG oslo_vmware.api [None req-fc2998f3-7b60-4305-a056-b6213632209f tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779640, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.261097] env[63379]: DEBUG oslo_vmware.api [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Task: {'id': task-1779639, 'name': PowerOffVM_Task, 'duration_secs': 0.277157} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1638.261456] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1638.261699] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1638.262760] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85a1c9e3-97cd-4d19-b6a7-a9d7079f9598 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.270116] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1638.270373] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f60f3e0a-9756-40ca-a371-3a609de0067c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.295733] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1638.296124] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1638.296374] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Deleting the datastore file [datastore1] b91a5b89-0456-431d-b099-adda3a6b3024 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1638.296672] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-129565c5-3bb3-4b35-acf9-ac943008dbb1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.304553] env[63379]: DEBUG oslo_vmware.api [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Waiting for the task: (returnval){ [ 1638.304553] env[63379]: value = "task-1779642" [ 1638.304553] env[63379]: _type = "Task" [ 1638.304553] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1638.313018] env[63379]: DEBUG oslo_vmware.api [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Task: {'id': task-1779642, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.489052] env[63379]: INFO nova.compute.manager [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Rebuilding instance [ 1638.530946] env[63379]: DEBUG nova.compute.manager [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1638.531881] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8d60190-a103-4c5a-a89a-da400129ec94 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.637771] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fd53a111-d3e7-4980-8551-aefbe6955814 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Lock "2a996f06-542e-4f71-95a4-0f71097d1478" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.797s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1638.734281] env[63379]: DEBUG oslo_vmware.api [None req-fc2998f3-7b60-4305-a056-b6213632209f tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779640, 'name': PowerOnVM_Task, 'duration_secs': 0.435073} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1638.735231] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc2998f3-7b60-4305-a056-b6213632209f tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1638.738628] env[63379]: DEBUG nova.compute.manager [None req-fc2998f3-7b60-4305-a056-b6213632209f tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1638.741124] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8583dc0-23c7-4df4-8ada-e23d2085c827 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.804550] env[63379]: DEBUG nova.compute.manager [req-e0b42d6d-cad0-48f1-ad4f-bf8013e22fbb req-7d5e93d5-f700-440b-9c90-1b116c4e8af7 service nova] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Received event network-vif-plugged-333b52cb-3eba-421d-a26f-dc741d462410 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1638.804738] env[63379]: DEBUG oslo_concurrency.lockutils [req-e0b42d6d-cad0-48f1-ad4f-bf8013e22fbb req-7d5e93d5-f700-440b-9c90-1b116c4e8af7 service nova] Acquiring lock "5aad86f8-0b3b-43ca-982b-c670e3411c01-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1638.806585] env[63379]: DEBUG oslo_concurrency.lockutils [req-e0b42d6d-cad0-48f1-ad4f-bf8013e22fbb req-7d5e93d5-f700-440b-9c90-1b116c4e8af7 service nova] Lock "5aad86f8-0b3b-43ca-982b-c670e3411c01-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1638.806937] env[63379]: DEBUG oslo_concurrency.lockutils [req-e0b42d6d-cad0-48f1-ad4f-bf8013e22fbb req-7d5e93d5-f700-440b-9c90-1b116c4e8af7 service nova] Lock "5aad86f8-0b3b-43ca-982b-c670e3411c01-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.002s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1638.807025] env[63379]: DEBUG nova.compute.manager [req-e0b42d6d-cad0-48f1-ad4f-bf8013e22fbb req-7d5e93d5-f700-440b-9c90-1b116c4e8af7 service nova] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] No waiting events found dispatching network-vif-plugged-333b52cb-3eba-421d-a26f-dc741d462410 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1638.807219] env[63379]: WARNING nova.compute.manager [req-e0b42d6d-cad0-48f1-ad4f-bf8013e22fbb req-7d5e93d5-f700-440b-9c90-1b116c4e8af7 service nova] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Received unexpected event network-vif-plugged-333b52cb-3eba-421d-a26f-dc741d462410 for instance with vm_state building and task_state spawning. 
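
The repeated "Acquiring lock ... / Lock ... acquired by ... / released" records come from oslo.concurrency's lockutils wrappers (the `inner` frames at lockutils.py:402/407/421), which Nova uses to serialize critical sections such as `_locked_do_build_and_run_instance`, the `compute_resources` claims, and the per-instance event handling seen just above. A minimal sketch of the two usual forms, with made-up lock names:

```python
# Sketch of the oslo.concurrency locking pattern behind the
# "Acquiring lock ... / acquired ... / released" log records.
# Lock names here are placeholders.
from oslo_concurrency import lockutils

# Decorator form: all callers sharing the lock name run one at a time,
# which is what produces the "waited N.NNNs" / "held N.NNNs" records.
@lockutils.synchronized('demo-instance-uuid')
def build_instance():
    # critical section: only one build per instance proceeds at a time
    pass

# Context-manager form, comparable to the "compute_resources" and
# "refresh_cache-<uuid>" locks in the log.
with lockutils.lock('compute_resources'):
    # claim or update tracked resources under the lock
    pass
```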
[ 1638.821140] env[63379]: DEBUG oslo_vmware.api [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Task: {'id': task-1779642, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.306723} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1638.821408] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1638.821600] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1638.821780] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1638.927504] env[63379]: DEBUG nova.network.neutron [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Successfully updated port: 333b52cb-3eba-421d-a26f-dc741d462410 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1639.045546] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1639.045932] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d2639bed-03d8-45fa-9032-15004f3969fc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.053414] env[63379]: DEBUG oslo_vmware.api [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1639.053414] env[63379]: value = "task-1779643" [ 1639.053414] env[63379]: _type = "Task" [ 1639.053414] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1639.065423] env[63379]: DEBUG oslo_vmware.api [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779643, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1639.094301] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92e0e2fe-6915-4350-ba87-cb012ba65f5b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.101925] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a1b63b0-ec1d-4bd8-9cd4-6b06bb80e053 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.134894] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ba55a48-1ca2-4d66-8dd2-cfd0e96040b4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.144413] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b46a1c28-8416-4d65-9d77-8a778545b7a1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.161519] env[63379]: DEBUG nova.compute.provider_tree [None req-c11d0bc1-2313-40aa-bfc2-c315ed7e2fcc tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1639.431174] env[63379]: DEBUG oslo_concurrency.lockutils [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Acquiring lock "refresh_cache-5aad86f8-0b3b-43ca-982b-c670e3411c01" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1639.431428] env[63379]: DEBUG oslo_concurrency.lockutils [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Acquired lock "refresh_cache-5aad86f8-0b3b-43ca-982b-c670e3411c01" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1639.431670] env[63379]: DEBUG nova.network.neutron [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1639.566933] env[63379]: DEBUG oslo_vmware.api [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779643, 'name': PowerOffVM_Task, 'duration_secs': 0.451248} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1639.567312] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1639.567747] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1639.568610] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2df31f38-9b9c-45ba-a05e-defd5642288c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.576332] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1639.576569] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ef13191a-52f6-49df-b22c-64f6473a678a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.666309] env[63379]: DEBUG nova.scheduler.client.report [None req-c11d0bc1-2313-40aa-bfc2-c315ed7e2fcc tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1639.770014] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1639.770260] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1639.770448] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Deleting the datastore file [datastore1] 758ade2c-7f75-4907-95d5-681d5792ae31 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1639.770721] 
env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8803f6e9-10a6-48e6-84fe-633459dd6560 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.776839] env[63379]: DEBUG oslo_vmware.api [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1639.776839] env[63379]: value = "task-1779645" [ 1639.776839] env[63379]: _type = "Task" [ 1639.776839] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1639.784653] env[63379]: DEBUG oslo_vmware.api [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779645, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1639.856280] env[63379]: DEBUG nova.virt.hardware [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1639.856549] env[63379]: DEBUG nova.virt.hardware [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1639.856714] env[63379]: DEBUG nova.virt.hardware [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1639.856903] env[63379]: DEBUG nova.virt.hardware [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1639.857075] env[63379]: DEBUG nova.virt.hardware [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1639.857236] env[63379]: DEBUG nova.virt.hardware [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Chose 
sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1639.857459] env[63379]: DEBUG nova.virt.hardware [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1639.857627] env[63379]: DEBUG nova.virt.hardware [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1639.857798] env[63379]: DEBUG nova.virt.hardware [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1639.857967] env[63379]: DEBUG nova.virt.hardware [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1639.858211] env[63379]: DEBUG nova.virt.hardware [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1639.859085] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f054eaa6-2748-45cf-b808-d73ee2852534 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.867462] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6029404-3963-4e6f-bb8f-2fca29ae7ac9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.881201] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Instance VIF info [] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1639.886906] env[63379]: DEBUG oslo.service.loopingcall [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1639.887204] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1639.887452] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-32dd0fc2-0f14-4bc8-8865-72cd9da01f58 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.908026] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1639.908026] env[63379]: value = "task-1779646" [ 1639.908026] env[63379]: _type = "Task" [ 1639.908026] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1639.912911] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779646, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1639.986589] env[63379]: DEBUG nova.network.neutron [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1640.107205] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Acquiring lock "da66c3d9-ca03-4113-8703-64b666628936" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1640.107559] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Lock "da66c3d9-ca03-4113-8703-64b666628936" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1640.172098] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c11d0bc1-2313-40aa-bfc2-c315ed7e2fcc tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.073s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1640.177075] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6ff500ce-a441-48c0-99b5-b45d99dca87e tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.499s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1640.177251] env[63379]: DEBUG nova.objects.instance [None req-6ff500ce-a441-48c0-99b5-b45d99dca87e tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Lazy-loading 'resources' on Instance uuid 941ac23c-6aa9-4ed1-840a-326423b7cbc0 {{(pid=63379) 
obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1640.200997] env[63379]: INFO nova.scheduler.client.report [None req-c11d0bc1-2313-40aa-bfc2-c315ed7e2fcc tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Deleted allocations for instance ac596f08-86a3-42e0-86e6-41a173fe868f [ 1640.220600] env[63379]: DEBUG nova.network.neutron [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Updating instance_info_cache with network_info: [{"id": "333b52cb-3eba-421d-a26f-dc741d462410", "address": "fa:16:3e:3e:47:e3", "network": {"id": "7dac6426-c05a-46bf-b157-50f5ecbe4faa", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1105910257-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "075142842d284d6f8fdec7592239a03c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "496faa4d-d874-449b-905e-328ddd60b31b", "external-id": "nsx-vlan-transportzone-391", "segmentation_id": 391, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap333b52cb-3e", "ovs_interfaceid": "333b52cb-3eba-421d-a26f-dc741d462410", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1640.287464] env[63379]: DEBUG oslo_vmware.api [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779645, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.270318} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1640.287656] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1640.287882] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1640.288119] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1640.418025] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779646, 'name': CreateVM_Task, 'duration_secs': 0.280621} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1640.418025] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1640.418251] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1640.418505] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1640.418942] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1640.419206] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8457323b-b09b-4a18-87dd-75951386764d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.424661] env[63379]: DEBUG oslo_vmware.api [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Waiting for the task: (returnval){ [ 1640.424661] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c31cdd-5807-0bc2-143b-1ec1383baaee" [ 1640.424661] env[63379]: _type = "Task" [ 1640.424661] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1640.434821] env[63379]: DEBUG oslo_vmware.api [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c31cdd-5807-0bc2-143b-1ec1383baaee, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1640.504051] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Acquiring lock "7687aaa1-d1a0-4d0d-a6b4-47c454fe3655" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1640.504237] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Lock "7687aaa1-d1a0-4d0d-a6b4-47c454fe3655" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1640.610490] env[63379]: DEBUG nova.compute.manager [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1640.710641] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c11d0bc1-2313-40aa-bfc2-c315ed7e2fcc tempest-ServerShowV254Test-462289757 tempest-ServerShowV254Test-462289757-project-member] Lock "ac596f08-86a3-42e0-86e6-41a173fe868f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.530s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1640.726140] env[63379]: DEBUG oslo_concurrency.lockutils [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Releasing lock "refresh_cache-5aad86f8-0b3b-43ca-982b-c670e3411c01" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1640.726371] env[63379]: DEBUG nova.compute.manager [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Instance network_info: |[{"id": "333b52cb-3eba-421d-a26f-dc741d462410", "address": "fa:16:3e:3e:47:e3", "network": {"id": "7dac6426-c05a-46bf-b157-50f5ecbe4faa", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1105910257-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "075142842d284d6f8fdec7592239a03c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "496faa4d-d874-449b-905e-328ddd60b31b", "external-id": "nsx-vlan-transportzone-391", "segmentation_id": 391, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap333b52cb-3e", "ovs_interfaceid": 
"333b52cb-3eba-421d-a26f-dc741d462410", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1640.727511] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3e:47:e3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '496faa4d-d874-449b-905e-328ddd60b31b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '333b52cb-3eba-421d-a26f-dc741d462410', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1640.735035] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Creating folder: Project (075142842d284d6f8fdec7592239a03c). Parent ref: group-v369214. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1640.735528] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e5cc7ce4-2a1e-48b1-9580-0cb32a6555e6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.753572] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Created folder: Project (075142842d284d6f8fdec7592239a03c) in parent group-v369214. [ 1640.753814] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Creating folder: Instances. Parent ref: group-v369401. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1640.754073] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-757be09a-5204-4a9e-86b5-4209e1a790e1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.764147] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Created folder: Instances in parent group-v369401. [ 1640.764310] env[63379]: DEBUG oslo.service.loopingcall [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1640.764500] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1640.764709] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c7401f38-8259-4c73-ab75-82b32940ec12 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.786885] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1640.786885] env[63379]: value = "task-1779649" [ 1640.786885] env[63379]: _type = "Task" [ 1640.786885] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1640.798336] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779649, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1640.832858] env[63379]: DEBUG nova.compute.manager [req-bc3a78b2-92ab-4f95-b380-679b170cf04a req-f8f74248-53f7-4c0e-b190-1635c0f797b6 service nova] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Received event network-changed-333b52cb-3eba-421d-a26f-dc741d462410 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1640.833088] env[63379]: DEBUG nova.compute.manager [req-bc3a78b2-92ab-4f95-b380-679b170cf04a req-f8f74248-53f7-4c0e-b190-1635c0f797b6 service nova] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Refreshing instance network info cache due to event network-changed-333b52cb-3eba-421d-a26f-dc741d462410. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1640.833317] env[63379]: DEBUG oslo_concurrency.lockutils [req-bc3a78b2-92ab-4f95-b380-679b170cf04a req-f8f74248-53f7-4c0e-b190-1635c0f797b6 service nova] Acquiring lock "refresh_cache-5aad86f8-0b3b-43ca-982b-c670e3411c01" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1640.833480] env[63379]: DEBUG oslo_concurrency.lockutils [req-bc3a78b2-92ab-4f95-b380-679b170cf04a req-f8f74248-53f7-4c0e-b190-1635c0f797b6 service nova] Acquired lock "refresh_cache-5aad86f8-0b3b-43ca-982b-c670e3411c01" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1640.833626] env[63379]: DEBUG nova.network.neutron [req-bc3a78b2-92ab-4f95-b380-679b170cf04a req-f8f74248-53f7-4c0e-b190-1635c0f797b6 service nova] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Refreshing network info cache for port 333b52cb-3eba-421d-a26f-dc741d462410 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1640.934949] env[63379]: DEBUG oslo_vmware.api [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c31cdd-5807-0bc2-143b-1ec1383baaee, 'name': SearchDatastore_Task, 'duration_secs': 0.026779} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1640.937564] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1640.937813] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1640.938065] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1640.938226] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1640.938415] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1640.938899] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5511105f-55ff-45cf-83a1-50d1b9c9c924 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.947614] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1640.947810] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1640.948573] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5bf14917-aedb-4db6-afa7-7a8c678be11a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.958570] env[63379]: DEBUG oslo_vmware.api [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Waiting for the task: (returnval){ [ 1640.958570] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a6ffd8-4368-dcfc-7b80-3634ba68694e" [ 1640.958570] env[63379]: _type = "Task" [ 1640.958570] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1640.967135] env[63379]: DEBUG oslo_vmware.api [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a6ffd8-4368-dcfc-7b80-3634ba68694e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.006484] env[63379]: DEBUG nova.compute.manager [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1641.069733] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-304d8fda-437e-4006-93be-0989e705a75c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.077651] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca2faeb4-3e5f-4462-9ca7-d8470ddaad04 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.108108] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c539dd8-f823-4918-9a5e-919f8d5007d6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.118814] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf6f34dc-c43b-44de-b8de-1eb01902b1cc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.135470] env[63379]: DEBUG nova.compute.provider_tree [None req-6ff500ce-a441-48c0-99b5-b45d99dca87e tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1641.137625] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1641.303488] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779649, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.328526] env[63379]: DEBUG nova.virt.hardware [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1641.328792] env[63379]: DEBUG nova.virt.hardware [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1641.328985] env[63379]: DEBUG nova.virt.hardware [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1641.329220] env[63379]: DEBUG nova.virt.hardware [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1641.329375] env[63379]: DEBUG nova.virt.hardware [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1641.329527] env[63379]: DEBUG nova.virt.hardware [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1641.329740] env[63379]: DEBUG nova.virt.hardware [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1641.329905] env[63379]: DEBUG nova.virt.hardware [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] 
Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1641.330094] env[63379]: DEBUG nova.virt.hardware [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1641.330265] env[63379]: DEBUG nova.virt.hardware [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1641.330442] env[63379]: DEBUG nova.virt.hardware [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1641.331338] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d524d613-00fa-4d52-8a93-67332d9a4463 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.340948] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3256cdcb-d550-4371-9050-83dfc0629808 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.355429] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bf:be:c8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e445fb59-822c-4d7d-943b-c8e3bbaca62e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '021a6cdc-585b-40dc-a330-d328102cf80c', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1641.362872] env[63379]: DEBUG oslo.service.loopingcall [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1641.363144] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1641.363741] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-32a7dcc3-0db4-4c74-a002-68b2cc9d0e61 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.387013] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1641.387013] env[63379]: value = "task-1779650" [ 1641.387013] env[63379]: _type = "Task" [ 1641.387013] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1641.395111] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779650, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.474693] env[63379]: DEBUG oslo_vmware.api [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a6ffd8-4368-dcfc-7b80-3634ba68694e, 'name': SearchDatastore_Task, 'duration_secs': 0.009614} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1641.475553] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38652300-a294-4cb2-b623-376e6fb9bb6f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.481300] env[63379]: DEBUG oslo_vmware.api [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Waiting for the task: (returnval){ [ 1641.481300] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]521f3d99-8737-c14c-19ce-7f882e770001" [ 1641.481300] env[63379]: _type = "Task" [ 1641.481300] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1641.489830] env[63379]: DEBUG oslo_vmware.api [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]521f3d99-8737-c14c-19ce-7f882e770001, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.530047] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1641.639652] env[63379]: DEBUG nova.scheduler.client.report [None req-6ff500ce-a441-48c0-99b5-b45d99dca87e tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1641.681706] env[63379]: DEBUG nova.network.neutron [req-bc3a78b2-92ab-4f95-b380-679b170cf04a req-f8f74248-53f7-4c0e-b190-1635c0f797b6 service nova] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Updated VIF entry in instance network info cache for port 333b52cb-3eba-421d-a26f-dc741d462410. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1641.681706] env[63379]: DEBUG nova.network.neutron [req-bc3a78b2-92ab-4f95-b380-679b170cf04a req-f8f74248-53f7-4c0e-b190-1635c0f797b6 service nova] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Updating instance_info_cache with network_info: [{"id": "333b52cb-3eba-421d-a26f-dc741d462410", "address": "fa:16:3e:3e:47:e3", "network": {"id": "7dac6426-c05a-46bf-b157-50f5ecbe4faa", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1105910257-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "075142842d284d6f8fdec7592239a03c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "496faa4d-d874-449b-905e-328ddd60b31b", "external-id": "nsx-vlan-transportzone-391", "segmentation_id": 391, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap333b52cb-3e", "ovs_interfaceid": "333b52cb-3eba-421d-a26f-dc741d462410", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1641.798206] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779649, 'name': CreateVM_Task, 'duration_secs': 0.818681} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1641.798206] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1641.798708] env[63379]: DEBUG oslo_concurrency.lockutils [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1641.798957] env[63379]: DEBUG oslo_concurrency.lockutils [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1641.799391] env[63379]: DEBUG oslo_concurrency.lockutils [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1641.799660] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b5b7ae6-0c07-4357-a28c-367bb712ba6e {{(pid=63379) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.804352] env[63379]: DEBUG oslo_vmware.api [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Waiting for the task: (returnval){ [ 1641.804352] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]525c2929-1b96-bcf4-b91b-5441ed6918c0" [ 1641.804352] env[63379]: _type = "Task" [ 1641.804352] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1641.812424] env[63379]: DEBUG oslo_vmware.api [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]525c2929-1b96-bcf4-b91b-5441ed6918c0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.897926] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779650, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.991964] env[63379]: DEBUG oslo_vmware.api [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]521f3d99-8737-c14c-19ce-7f882e770001, 'name': SearchDatastore_Task, 'duration_secs': 0.010376} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1641.992283] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1641.992557] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] b91a5b89-0456-431d-b099-adda3a6b3024/b91a5b89-0456-431d-b099-adda3a6b3024.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1641.992824] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e2ef4f7e-911b-45b7-8a30-dc3cb082279f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.999935] env[63379]: DEBUG oslo_vmware.api [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Waiting for the task: (returnval){ [ 1641.999935] env[63379]: value = "task-1779651" [ 1641.999935] env[63379]: _type = "Task" [ 1641.999935] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1642.007978] env[63379]: DEBUG oslo_vmware.api [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Task: {'id': task-1779651, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.145472] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6ff500ce-a441-48c0-99b5-b45d99dca87e tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.969s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1642.148174] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 30.162s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1642.148357] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1642.148515] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63379) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1642.148914] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 28.407s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1642.151063] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7ae697b-6fd8-475a-943e-754e97ef7278 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.161692] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bff176d-a859-49ec-8b8a-15f2691598f0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.166577] env[63379]: INFO nova.scheduler.client.report [None req-6ff500ce-a441-48c0-99b5-b45d99dca87e tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Deleted allocations for instance 941ac23c-6aa9-4ed1-840a-326423b7cbc0 [ 1642.182141] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f305483f-be85-4092-aec8-a20fecd89e88 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.185047] env[63379]: DEBUG oslo_concurrency.lockutils [req-bc3a78b2-92ab-4f95-b380-679b170cf04a 
req-f8f74248-53f7-4c0e-b190-1635c0f797b6 service nova] Releasing lock "refresh_cache-5aad86f8-0b3b-43ca-982b-c670e3411c01" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1642.189794] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d7e2175-c95d-4fd7-bd63-5b05ab661793 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.221513] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178736MB free_disk=162GB free_vcpus=48 pci_devices=None {{(pid=63379) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1642.221665] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1642.316220] env[63379]: DEBUG oslo_vmware.api [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]525c2929-1b96-bcf4-b91b-5441ed6918c0, 'name': SearchDatastore_Task, 'duration_secs': 0.031647} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1642.316542] env[63379]: DEBUG oslo_concurrency.lockutils [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1642.316782] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1642.317062] env[63379]: DEBUG oslo_concurrency.lockutils [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1642.317225] env[63379]: DEBUG oslo_concurrency.lockutils [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1642.317414] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 
tempest-ServerAddressesTestJSON-287070463-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1642.317677] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2f530c0c-2bce-44ad-a619-1c082ffe7fa2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.327028] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1642.327225] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1642.327945] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21eb0394-c515-479c-a26a-c3c06e219fab {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.337019] env[63379]: DEBUG oslo_vmware.api [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Waiting for the task: (returnval){ [ 1642.337019] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]526bea9e-79c3-ccf5-3848-63e6968b6c88" [ 1642.337019] env[63379]: _type = "Task" [ 1642.337019] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1642.342499] env[63379]: DEBUG oslo_vmware.api [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]526bea9e-79c3-ccf5-3848-63e6968b6c88, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.397492] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779650, 'name': CreateVM_Task, 'duration_secs': 0.762311} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1642.397686] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1642.398700] env[63379]: DEBUG oslo_concurrency.lockutils [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1642.398924] env[63379]: DEBUG oslo_concurrency.lockutils [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1642.399287] env[63379]: DEBUG oslo_concurrency.lockutils [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1642.399555] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e32d59e-c62e-4c37-96bc-1144a77f6842 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.404327] env[63379]: DEBUG oslo_vmware.api [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1642.404327] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]520ee0b2-0b69-dd85-cc59-7de8e01d3aa4" [ 1642.404327] env[63379]: _type = "Task" [ 1642.404327] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1642.414986] env[63379]: DEBUG oslo_vmware.api [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]520ee0b2-0b69-dd85-cc59-7de8e01d3aa4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.511694] env[63379]: DEBUG oslo_vmware.api [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Task: {'id': task-1779651, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.655416] env[63379]: DEBUG nova.objects.instance [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Lazy-loading 'migration_context' on Instance uuid f082cdd7-228e-4100-b301-5af6daea9b36 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1642.686673] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6ff500ce-a441-48c0-99b5-b45d99dca87e tempest-ServersTestJSON-443652715 tempest-ServersTestJSON-443652715-project-member] Lock "941ac23c-6aa9-4ed1-840a-326423b7cbc0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.515s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1642.846827] env[63379]: DEBUG oslo_vmware.api [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]526bea9e-79c3-ccf5-3848-63e6968b6c88, 'name': SearchDatastore_Task, 'duration_secs': 0.009869} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1642.847760] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c845794a-0727-402c-9f18-966087eae759 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.853173] env[63379]: DEBUG oslo_vmware.api [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Waiting for the task: (returnval){ [ 1642.853173] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c11d8c-a340-3146-9a3b-6bacf40c7e9f" [ 1642.853173] env[63379]: _type = "Task" [ 1642.853173] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1642.861604] env[63379]: DEBUG oslo_vmware.api [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c11d8c-a340-3146-9a3b-6bacf40c7e9f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.915500] env[63379]: DEBUG oslo_vmware.api [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]520ee0b2-0b69-dd85-cc59-7de8e01d3aa4, 'name': SearchDatastore_Task, 'duration_secs': 0.057728} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1642.915671] env[63379]: DEBUG oslo_concurrency.lockutils [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1642.915857] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1642.916333] env[63379]: DEBUG oslo_concurrency.lockutils [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1643.011753] env[63379]: DEBUG oslo_vmware.api [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Task: {'id': task-1779651, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.790772} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1643.011753] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] b91a5b89-0456-431d-b099-adda3a6b3024/b91a5b89-0456-431d-b099-adda3a6b3024.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1643.011753] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1643.011753] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c653d5d7-7113-4cfb-9787-1035f061431a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.018322] env[63379]: DEBUG oslo_vmware.api [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Waiting for the task: (returnval){ [ 1643.018322] env[63379]: value = "task-1779652" [ 1643.018322] env[63379]: _type = "Task" [ 1643.018322] env[63379]: } to complete. 
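Every "Waiting for the task ... to complete" / "progress is N%" pair in this stretch comes from the same poll-until-done loop around a vSphere task. A toy, self-contained version of that loop (the interval and state names are assumptions, not oslo.vmware's internals):

    import time

    def wait_for_task(poll, interval=0.5):
        # poll() returns (state, progress); keep polling until the task settles.
        while True:
            state, progress = poll()
            print(f"progress is {progress}%")
            if state == "success":
                return
            if state == "error":
                raise RuntimeError("task failed")
            time.sleep(interval)

    # A fake task that finishes on the third poll, for demonstration.
    ticks = iter([("running", 0), ("running", 50), ("success", 100)])
    wait_for_task(lambda: next(ticks), interval=0)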
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.026417] env[63379]: DEBUG oslo_vmware.api [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Task: {'id': task-1779652, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.364120] env[63379]: DEBUG oslo_vmware.api [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c11d8c-a340-3146-9a3b-6bacf40c7e9f, 'name': SearchDatastore_Task, 'duration_secs': 0.010591} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1643.364419] env[63379]: DEBUG oslo_concurrency.lockutils [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1643.364668] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 5aad86f8-0b3b-43ca-982b-c670e3411c01/5aad86f8-0b3b-43ca-982b-c670e3411c01.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1643.364954] env[63379]: DEBUG oslo_concurrency.lockutils [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1643.365167] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1643.365388] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d36dce39-d81b-4413-b4e4-5331b11e9245 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.369149] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6b5ad2f8-a1e3-43dd-b652-fc5cbe88582f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.376188] env[63379]: DEBUG oslo_vmware.api [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Waiting for the task: (returnval){ [ 1643.376188] env[63379]: value = "task-1779653" [ 1643.376188] env[63379]: _type = "Task" 
[ 1643.376188] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.377057] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1643.377865] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1643.380569] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4301de35-bd97-4677-8705-09fea0b95c07 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.387419] env[63379]: DEBUG oslo_vmware.api [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Task: {'id': task-1779653, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.391070] env[63379]: DEBUG oslo_vmware.api [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1643.391070] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]528c163f-8b68-6dba-e5e3-53d262e28e90" [ 1643.391070] env[63379]: _type = "Task" [ 1643.391070] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.398758] env[63379]: DEBUG oslo_vmware.api [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]528c163f-8b68-6dba-e5e3-53d262e28e90, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.533109] env[63379]: DEBUG oslo_vmware.api [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Task: {'id': task-1779652, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.0935} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1643.535831] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1643.536841] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40355b53-e081-4e7d-a6dc-8e950a85533e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.558347] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] b91a5b89-0456-431d-b099-adda3a6b3024/b91a5b89-0456-431d-b099-adda3a6b3024.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1643.561382] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f8226c43-0bf8-474d-82cf-94a0b1516b6e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.583076] env[63379]: DEBUG oslo_vmware.api [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Waiting for the task: (returnval){ [ 1643.583076] env[63379]: value = "task-1779654" [ 1643.583076] env[63379]: _type = "Task" [ 1643.583076] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.593880] env[63379]: DEBUG oslo_vmware.api [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Task: {'id': task-1779654, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.602942] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16573d7e-2df4-4d3e-bae2-ead647a977e1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.614542] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d920673e-d037-4061-9b4a-c025f0d1f80e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.651643] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6210a5a3-7b68-4fc4-bf14-1ef29f7d8383 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.661333] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeefc5d4-b549-4f49-aaa6-d55756cee523 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.677658] env[63379]: DEBUG nova.compute.provider_tree [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1643.886562] env[63379]: DEBUG oslo_vmware.api [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Task: {'id': task-1779653, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.902530] env[63379]: DEBUG oslo_vmware.api [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]528c163f-8b68-6dba-e5e3-53d262e28e90, 'name': SearchDatastore_Task, 'duration_secs': 0.014619} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1643.903489] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eade9f2a-d121-40a4-b66b-64411ad1126b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.912029] env[63379]: DEBUG oslo_vmware.api [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1643.912029] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a5a8c0-a0e9-9cc1-0ccd-348d74c85d85" [ 1643.912029] env[63379]: _type = "Task" [ 1643.912029] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.918672] env[63379]: DEBUG oslo_vmware.api [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a5a8c0-a0e9-9cc1-0ccd-348d74c85d85, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.094033] env[63379]: DEBUG oslo_vmware.api [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Task: {'id': task-1779654, 'name': ReconfigVM_Task, 'duration_secs': 0.402473} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1644.094033] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Reconfigured VM instance instance-00000041 to attach disk [datastore1] b91a5b89-0456-431d-b099-adda3a6b3024/b91a5b89-0456-431d-b099-adda3a6b3024.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1644.094033] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ff138cfb-0607-43dd-8cfa-c7683a05c384 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.100754] env[63379]: DEBUG oslo_vmware.api [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Waiting for the task: (returnval){ [ 1644.100754] env[63379]: value = "task-1779655" [ 1644.100754] env[63379]: _type = "Task" [ 1644.100754] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.108982] env[63379]: DEBUG oslo_vmware.api [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Task: {'id': task-1779655, 'name': Rename_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.181370] env[63379]: DEBUG nova.scheduler.client.report [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1644.387582] env[63379]: DEBUG oslo_vmware.api [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Task: {'id': task-1779653, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.514119} completed successfully. 
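The inventory reported above for provider cf478c89-515f-4372-b90f-4868ab56e978 translates into schedulable capacity with the usual placement formula, capacity = (total - reserved) * allocation_ratio, while max_unit caps what a single instance may take. A quick check with the numbers copied from the log:

    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0, "max_unit": 16},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0, "max_unit": 162},
    }

    for rc, inv in inventory.items():
        capacity = int((inv["total"] - inv["reserved"]) * inv["allocation_ratio"])
        print(f"{rc}: {capacity} schedulable, at most {inv['max_unit']} per instance")
    # VCPU: 192 schedulable, MEMORY_MB: 196078 schedulable, DISK_GB: 400 schedulable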
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1644.387918] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 5aad86f8-0b3b-43ca-982b-c670e3411c01/5aad86f8-0b3b-43ca-982b-c670e3411c01.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1644.388187] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1644.388445] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fdfbfd68-7ed7-4d03-96cc-569ef0e1a86c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.394945] env[63379]: DEBUG oslo_vmware.api [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Waiting for the task: (returnval){ [ 1644.394945] env[63379]: value = "task-1779656" [ 1644.394945] env[63379]: _type = "Task" [ 1644.394945] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.403805] env[63379]: DEBUG oslo_vmware.api [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Task: {'id': task-1779656, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.422382] env[63379]: DEBUG oslo_vmware.api [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a5a8c0-a0e9-9cc1-0ccd-348d74c85d85, 'name': SearchDatastore_Task, 'duration_secs': 0.009594} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1644.422729] env[63379]: DEBUG oslo_concurrency.lockutils [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1644.423763] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 758ade2c-7f75-4907-95d5-681d5792ae31/758ade2c-7f75-4907-95d5-681d5792ae31.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1644.424098] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6d1a996c-3a56-4e9b-ad3b-c1efe6b8f428 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.434922] env[63379]: DEBUG oslo_vmware.api [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1644.434922] env[63379]: value = "task-1779657" [ 1644.434922] env[63379]: _type = "Task" [ 1644.434922] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.446189] env[63379]: DEBUG oslo_vmware.api [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779657, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.611653] env[63379]: DEBUG oslo_vmware.api [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Task: {'id': task-1779655, 'name': Rename_Task, 'duration_secs': 0.179588} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1644.611944] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1644.612223] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d5c17413-5ccd-4bc5-9a98-53af68b9143e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.619170] env[63379]: DEBUG oslo_vmware.api [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Waiting for the task: (returnval){ [ 1644.619170] env[63379]: value = "task-1779658" [ 1644.619170] env[63379]: _type = "Task" [ 1644.619170] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.627091] env[63379]: DEBUG oslo_vmware.api [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Task: {'id': task-1779658, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.905098] env[63379]: DEBUG oslo_vmware.api [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Task: {'id': task-1779656, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069709} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1644.905393] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1644.906229] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-147f0cf9-ac81-4a05-a28f-4cba9c01a38a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.942580] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Reconfiguring VM instance instance-00000042 to attach disk [datastore1] 5aad86f8-0b3b-43ca-982b-c670e3411c01/5aad86f8-0b3b-43ca-982b-c670e3411c01.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1644.943431] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fa6c2d50-9864-40d4-b979-813c6446cdf2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.982199] env[63379]: DEBUG oslo_vmware.api [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779657, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.983688] env[63379]: DEBUG oslo_vmware.api [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Waiting for the task: (returnval){ [ 1644.983688] env[63379]: value = "task-1779659" [ 1644.983688] env[63379]: _type = "Task" [ 1644.983688] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.993692] env[63379]: DEBUG oslo_vmware.api [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Task: {'id': task-1779659, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.132131] env[63379]: DEBUG oslo_vmware.api [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Task: {'id': task-1779658, 'name': PowerOnVM_Task, 'duration_secs': 0.412491} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1645.132458] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1645.133249] env[63379]: DEBUG nova.compute.manager [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1645.133844] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fc2c7f8-a065-47ad-bd01-5ca3fa4c82d8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.192859] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 3.044s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1645.208465] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.169s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1645.213727] env[63379]: INFO nova.compute.claims [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1645.454037] env[63379]: DEBUG oslo_vmware.api [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779657, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.969453} completed successfully. 
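With task-1779658 finished, instance b91a5b89-0456-431d-b099-adda3a6b3024 has been through the whole disk-and-power-on chain that the preceding entries trace. Compressed into one illustrative sequence (the runner below is a stand-in, not the driver's code):

    SPAWN_STEPS = [
        "CopyVirtualDisk_Task",    # cached image VMDK -> <uuid>/<uuid>.vmdk
        "ExtendVirtualDisk_Task",  # grow the root disk to the flavor size
        "ReconfigVM_Task",         # attach the disk to the VM
        "Rename_Task",             # rename the VM to its instance UUID
        "PowerOnVM_Task",          # boot it
    ]

    def spawn(run_task):
        # run_task(name) submits the named vSphere task and waits for it.
        for step in SPAWN_STEPS:
            run_task(step)

    spawn(lambda name: print(f"waited for {name}"))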
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1645.454423] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 758ade2c-7f75-4907-95d5-681d5792ae31/758ade2c-7f75-4907-95d5-681d5792ae31.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1645.454747] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1645.455099] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eadeddcb-bc75-423b-9833-980cbb82c136 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.462249] env[63379]: DEBUG oslo_vmware.api [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1645.462249] env[63379]: value = "task-1779660" [ 1645.462249] env[63379]: _type = "Task" [ 1645.462249] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1645.469997] env[63379]: DEBUG oslo_vmware.api [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779660, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.495465] env[63379]: DEBUG oslo_vmware.api [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Task: {'id': task-1779659, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.653616] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1645.973044] env[63379]: DEBUG oslo_vmware.api [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779660, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.995820] env[63379]: DEBUG oslo_vmware.api [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Task: {'id': task-1779659, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.283952] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8452693d-f0e2-4485-8ee9-46a4f6490094 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Acquiring lock "b91a5b89-0456-431d-b099-adda3a6b3024" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1646.284256] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8452693d-f0e2-4485-8ee9-46a4f6490094 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Lock "b91a5b89-0456-431d-b099-adda3a6b3024" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1646.284469] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8452693d-f0e2-4485-8ee9-46a4f6490094 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Acquiring lock "b91a5b89-0456-431d-b099-adda3a6b3024-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1646.284660] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8452693d-f0e2-4485-8ee9-46a4f6490094 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Lock "b91a5b89-0456-431d-b099-adda3a6b3024-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1646.284866] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8452693d-f0e2-4485-8ee9-46a4f6490094 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Lock "b91a5b89-0456-431d-b099-adda3a6b3024-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1646.286845] env[63379]: INFO nova.compute.manager [None req-8452693d-f0e2-4485-8ee9-46a4f6490094 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Terminating instance [ 1646.289166] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8452693d-f0e2-4485-8ee9-46a4f6490094 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Acquiring lock "refresh_cache-b91a5b89-0456-431d-b099-adda3a6b3024" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1646.289357] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8452693d-f0e2-4485-8ee9-46a4f6490094 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Acquired lock "refresh_cache-b91a5b89-0456-431d-b099-adda3a6b3024" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1646.289535] env[63379]: DEBUG nova.network.neutron [None req-8452693d-f0e2-4485-8ee9-46a4f6490094 tempest-ServerShowV257Test-2050319880 
tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1646.475186] env[63379]: DEBUG oslo_vmware.api [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779660, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.497030] env[63379]: DEBUG oslo_vmware.api [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Task: {'id': task-1779659, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.596169] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5ff613d-e13b-4d39-ae34-99c9d381bcf5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.603717] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fd8cb6a-48b8-4647-b3c8-9c17ac3ca96c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.634995] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-138e80e0-9cb4-441d-bb6f-fa1fe104b803 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.642476] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a18ef13c-bc52-4411-9213-3dd87c44a0db {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.655481] env[63379]: DEBUG nova.compute.provider_tree [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1646.752503] env[63379]: INFO nova.compute.manager [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Swapping old allocation on dict_keys(['cf478c89-515f-4372-b90f-4868ab56e978']) held by migration 1d862343-7285-48b6-8ba8-374b0de20e47 for instance [ 1646.775694] env[63379]: DEBUG nova.scheduler.client.report [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Overwriting current allocation {'allocations': {'cf478c89-515f-4372-b90f-4868ab56e978': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 91}}, 'project_id': '551ba9289da4445ea0bad784aee2e86d', 'user_id': '5fa958cb524741079d651e388f00f3c4', 'consumer_generation': 1} on consumer f082cdd7-228e-4100-b301-5af6daea9b36 {{(pid=63379) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2032}} [ 1646.807637] env[63379]: DEBUG nova.network.neutron [None req-8452693d-f0e2-4485-8ee9-46a4f6490094 
tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1646.850353] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Acquiring lock "refresh_cache-f082cdd7-228e-4100-b301-5af6daea9b36" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1646.850619] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Acquired lock "refresh_cache-f082cdd7-228e-4100-b301-5af6daea9b36" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1646.850894] env[63379]: DEBUG nova.network.neutron [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1646.869497] env[63379]: DEBUG nova.network.neutron [None req-8452693d-f0e2-4485-8ee9-46a4f6490094 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1646.977342] env[63379]: DEBUG oslo_vmware.api [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779660, 'name': ExtendVirtualDisk_Task, 'duration_secs': 1.067242} completed successfully. 
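The allocation swap logged a little earlier for f082cdd7-228e-4100-b301-5af6daea9b36 overwrites the consumer's allocations in placement so that the usage held by migration 1d862343-7285-48b6-8ba8-374b0de20e47 moves back onto the instance. The document it writes has this shape (values copied from the log; how it is sent to placement is not shown here and is summarized in the trailing comment as an assumption):

    allocation_doc = {
        "allocations": {
            "cf478c89-515f-4372-b90f-4868ab56e978": {
                "resources": {"DISK_GB": 1, "MEMORY_MB": 256, "VCPU": 1},
            },
        },
        "project_id": "551ba9289da4445ea0bad784aee2e86d",
        "user_id": "5fa958cb524741079d651e388f00f3c4",
        "consumer_generation": 1,
    }
    # A body like this is written to the placement allocations API for the
    # instance consumer; the same swap drops the usage held under the
    # migration UUID, as the "Swapping old allocation" entry describes.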
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1646.977643] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1646.978617] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daf301ff-52f4-4714-b769-e94851fbb6e2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.002331] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Reconfiguring VM instance instance-00000027 to attach disk [datastore1] 758ade2c-7f75-4907-95d5-681d5792ae31/758ade2c-7f75-4907-95d5-681d5792ae31.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1647.006464] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-41967e9f-9ac3-46d9-a2a4-a71fd8fbb396 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.027109] env[63379]: DEBUG oslo_vmware.api [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Task: {'id': task-1779659, 'name': ReconfigVM_Task, 'duration_secs': 2.006636} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.028381] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Reconfigured VM instance instance-00000042 to attach disk [datastore1] 5aad86f8-0b3b-43ca-982b-c670e3411c01/5aad86f8-0b3b-43ca-982b-c670e3411c01.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1647.029351] env[63379]: DEBUG oslo_vmware.api [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1647.029351] env[63379]: value = "task-1779661" [ 1647.029351] env[63379]: _type = "Task" [ 1647.029351] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.029551] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-33f142b4-6673-48cd-a5bf-b6390d8ba769 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.039442] env[63379]: DEBUG oslo_vmware.api [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779661, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.040669] env[63379]: DEBUG oslo_vmware.api [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Waiting for the task: (returnval){ [ 1647.040669] env[63379]: value = "task-1779662" [ 1647.040669] env[63379]: _type = "Task" [ 1647.040669] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.049537] env[63379]: DEBUG oslo_vmware.api [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Task: {'id': task-1779662, 'name': Rename_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.158752] env[63379]: DEBUG nova.scheduler.client.report [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1647.373357] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8452693d-f0e2-4485-8ee9-46a4f6490094 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Releasing lock "refresh_cache-b91a5b89-0456-431d-b099-adda3a6b3024" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1647.373998] env[63379]: DEBUG nova.compute.manager [None req-8452693d-f0e2-4485-8ee9-46a4f6490094 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1647.374364] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8452693d-f0e2-4485-8ee9-46a4f6490094 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1647.375728] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db201cba-5e8a-4fcd-b519-0fb05d79f0c2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.385735] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-8452693d-f0e2-4485-8ee9-46a4f6490094 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1647.386088] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8c336b7e-fc9c-4bc6-98e5-4f7da5606600 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.393447] env[63379]: DEBUG oslo_vmware.api [None req-8452693d-f0e2-4485-8ee9-46a4f6490094 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Waiting for the task: (returnval){ [ 1647.393447] env[63379]: value = "task-1779663" [ 1647.393447] env[63379]: _type = "Task" [ 1647.393447] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.405621] env[63379]: DEBUG oslo_vmware.api [None req-8452693d-f0e2-4485-8ee9-46a4f6490094 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Task: {'id': task-1779663, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.549306] env[63379]: DEBUG oslo_vmware.api [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779661, 'name': ReconfigVM_Task, 'duration_secs': 0.419988} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.550379] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Reconfigured VM instance instance-00000027 to attach disk [datastore1] 758ade2c-7f75-4907-95d5-681d5792ae31/758ade2c-7f75-4907-95d5-681d5792ae31.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1647.550613] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-86acb540-7cf2-46e6-90dc-f734575e0771 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.555681] env[63379]: DEBUG oslo_vmware.api [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Task: {'id': task-1779662, 'name': Rename_Task, 'duration_secs': 0.155349} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.555890] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1647.556794] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b0cad998-2e6e-4925-ab59-28ba6bf51f65 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.560183] env[63379]: DEBUG oslo_vmware.api [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1647.560183] env[63379]: value = "task-1779664" [ 1647.560183] env[63379]: _type = "Task" [ 1647.560183] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.564557] env[63379]: DEBUG oslo_vmware.api [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Waiting for the task: (returnval){ [ 1647.564557] env[63379]: value = "task-1779665" [ 1647.564557] env[63379]: _type = "Task" [ 1647.564557] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.572176] env[63379]: DEBUG oslo_vmware.api [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779664, 'name': Rename_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.575087] env[63379]: DEBUG oslo_vmware.api [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Task: {'id': task-1779665, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.628947] env[63379]: DEBUG nova.network.neutron [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Updating instance_info_cache with network_info: [{"id": "bbe843e8-9156-454e-8ba4-dae6bc31c8b2", "address": "fa:16:3e:0c:14:52", "network": {"id": "55f3848c-4d4f-4c83-a3e6-bc7a6f7af3ce", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.215", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eb95d75934bc4912a35f709406a98a65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbbe843e8-91", "ovs_interfaceid": "bbe843e8-9156-454e-8ba4-dae6bc31c8b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1647.665248] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.457s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1647.665806] env[63379]: DEBUG nova.compute.manager [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1647.669161] env[63379]: DEBUG oslo_concurrency.lockutils [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.832s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1647.670654] env[63379]: INFO nova.compute.claims [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1647.904062] env[63379]: DEBUG oslo_vmware.api [None req-8452693d-f0e2-4485-8ee9-46a4f6490094 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Task: {'id': task-1779663, 'name': PowerOffVM_Task, 'duration_secs': 0.219131} completed successfully. 
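The instance_info_cache payload logged near the top of this block is plain JSON once it is lifted out of the log line. A toy snippet that pulls the port ID, MAC, and fixed IP out of a trimmed copy of that structure (dict navigation only; values copied from the log):

    network_info = [{
        "id": "bbe843e8-9156-454e-8ba4-dae6bc31c8b2",
        "address": "fa:16:3e:0c:14:52",
        "type": "ovs",
        "devname": "tapbbe843e8-91",
        "network": {
            "label": "shared",
            "subnets": [{
                "cidr": "192.168.233.0/24",
                "ips": [{"address": "192.168.233.215", "type": "fixed"}],
            }],
        },
    }]

    for vif in network_info:
        fixed_ips = [ip["address"]
                     for subnet in vif["network"]["subnets"]
                     for ip in subnet["ips"]]
        print(vif["id"], vif["address"], fixed_ips)  # port id, MAC, fixed IPs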
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.904347] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-8452693d-f0e2-4485-8ee9-46a4f6490094 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1647.904526] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8452693d-f0e2-4485-8ee9-46a4f6490094 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1647.904782] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3a5d81b2-0c44-4341-b0e2-abc401554a86 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.931074] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8452693d-f0e2-4485-8ee9-46a4f6490094 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1647.931344] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8452693d-f0e2-4485-8ee9-46a4f6490094 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1647.931545] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-8452693d-f0e2-4485-8ee9-46a4f6490094 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Deleting the datastore file [datastore1] b91a5b89-0456-431d-b099-adda3a6b3024 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1647.931808] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aa03d160-3e57-4fa5-9365-7c47ac264bb1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.938850] env[63379]: DEBUG oslo_vmware.api [None req-8452693d-f0e2-4485-8ee9-46a4f6490094 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Waiting for the task: (returnval){ [ 1647.938850] env[63379]: value = "task-1779667" [ 1647.938850] env[63379]: _type = "Task" [ 1647.938850] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.948197] env[63379]: DEBUG oslo_vmware.api [None req-8452693d-f0e2-4485-8ee9-46a4f6490094 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Task: {'id': task-1779667, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.072078] env[63379]: DEBUG oslo_vmware.api [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779664, 'name': Rename_Task, 'duration_secs': 0.249484} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1648.072712] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1648.072961] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e2dee48d-16e7-4189-a579-494b49f8ed68 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.077313] env[63379]: DEBUG oslo_vmware.api [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Task: {'id': task-1779665, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.082428] env[63379]: DEBUG oslo_vmware.api [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1648.082428] env[63379]: value = "task-1779668" [ 1648.082428] env[63379]: _type = "Task" [ 1648.082428] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1648.091366] env[63379]: DEBUG oslo_vmware.api [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779668, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.131375] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Releasing lock "refresh_cache-f082cdd7-228e-4100-b301-5af6daea9b36" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1648.132057] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1648.132451] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-feac216a-22bd-4fa8-bd93-de636c597e0c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.140731] env[63379]: DEBUG oslo_vmware.api [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Waiting for the task: (returnval){ [ 1648.140731] env[63379]: value = "task-1779669" [ 1648.140731] env[63379]: _type = "Task" [ 1648.140731] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1648.150941] env[63379]: DEBUG oslo_vmware.api [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779669, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.178215] env[63379]: DEBUG nova.compute.utils [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1648.179683] env[63379]: DEBUG nova.compute.manager [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1648.179808] env[63379]: DEBUG nova.network.neutron [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1648.218450] env[63379]: DEBUG nova.policy [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a0530d2698d245edae9ba088734adf0e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '86e655baa29c4c88b8648d273f92ed4b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1648.449623] env[63379]: DEBUG oslo_vmware.api [None req-8452693d-f0e2-4485-8ee9-46a4f6490094 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Task: {'id': task-1779667, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149774} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1648.449901] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-8452693d-f0e2-4485-8ee9-46a4f6490094 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1648.450109] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8452693d-f0e2-4485-8ee9-46a4f6490094 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1648.450302] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8452693d-f0e2-4485-8ee9-46a4f6490094 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1648.450481] env[63379]: INFO nova.compute.manager [None req-8452693d-f0e2-4485-8ee9-46a4f6490094 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1648.450723] env[63379]: DEBUG oslo.service.loopingcall [None req-8452693d-f0e2-4485-8ee9-46a4f6490094 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1648.450916] env[63379]: DEBUG nova.compute.manager [-] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1648.451020] env[63379]: DEBUG nova.network.neutron [-] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1648.466311] env[63379]: DEBUG nova.network.neutron [-] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1648.496520] env[63379]: DEBUG nova.network.neutron [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Successfully created port: eb04ffa3-5012-4114-8150-3bc9329f9328 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1648.577506] env[63379]: DEBUG oslo_vmware.api [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Task: {'id': task-1779665, 'name': PowerOnVM_Task, 'duration_secs': 0.632398} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1648.577801] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1648.578019] env[63379]: INFO nova.compute.manager [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Took 10.51 seconds to spawn the instance on the hypervisor. [ 1648.578708] env[63379]: DEBUG nova.compute.manager [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1648.579094] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce780dac-12a2-4f0d-aa52-e9defa4061c6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.598137] env[63379]: DEBUG oslo_vmware.api [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779668, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.655013] env[63379]: DEBUG oslo_vmware.api [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779669, 'name': PowerOffVM_Task, 'duration_secs': 0.27354} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1648.655013] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1648.655013] env[63379]: DEBUG nova.virt.hardware [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:30:29Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='fd63d07f-2af7-4c40-ac44-c2f8123389ab',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-418275153',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1648.655013] env[63379]: DEBUG nova.virt.hardware [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1648.655013] env[63379]: DEBUG nova.virt.hardware [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1648.655013] env[63379]: DEBUG nova.virt.hardware [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1648.655013] env[63379]: DEBUG nova.virt.hardware [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1648.655013] env[63379]: DEBUG nova.virt.hardware [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1648.655013] env[63379]: DEBUG nova.virt.hardware [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1648.655013] env[63379]: DEBUG nova.virt.hardware [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 
tempest-MigrationsAdminTest-2108201557-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1648.655013] env[63379]: DEBUG nova.virt.hardware [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1648.655013] env[63379]: DEBUG nova.virt.hardware [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1648.655013] env[63379]: DEBUG nova.virt.hardware [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1648.659527] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f2461fbf-3e1b-420c-a095-fef78e9bfecc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.675183] env[63379]: DEBUG oslo_vmware.api [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Waiting for the task: (returnval){ [ 1648.675183] env[63379]: value = "task-1779670" [ 1648.675183] env[63379]: _type = "Task" [ 1648.675183] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1648.687797] env[63379]: DEBUG nova.compute.manager [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1648.690378] env[63379]: DEBUG oslo_vmware.api [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779670, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.968188] env[63379]: DEBUG nova.network.neutron [-] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1649.072711] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d807b59e-01c7-4583-beed-deb4e7ae5282 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.080944] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24310066-1604-4196-bddc-23b6ace8fa35 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.119238] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91b700aa-7954-4d1c-99b3-1ace071140fb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.124920] env[63379]: INFO nova.compute.manager [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Took 39.74 seconds to build instance. [ 1649.129024] env[63379]: DEBUG oslo_vmware.api [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779668, 'name': PowerOnVM_Task, 'duration_secs': 0.876426} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1649.129024] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1649.129024] env[63379]: DEBUG nova.compute.manager [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1649.129024] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d9eb0d8-2ce3-4f2f-97ee-b455f89ccd3a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.135975] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6673ec7-3232-4427-b22f-74ae8e26f779 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.152602] env[63379]: DEBUG nova.compute.provider_tree [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1649.186367] env[63379]: DEBUG oslo_vmware.api [None 
req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779670, 'name': ReconfigVM_Task, 'duration_secs': 0.267247} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1649.187318] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bca52cf8-9da8-48a4-b676-96fcf46f8d6f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.208787] env[63379]: DEBUG nova.virt.hardware [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:30:29Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='fd63d07f-2af7-4c40-ac44-c2f8123389ab',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-418275153',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1649.208787] env[63379]: DEBUG nova.virt.hardware [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1649.208936] env[63379]: DEBUG nova.virt.hardware [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1649.209131] env[63379]: DEBUG nova.virt.hardware [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1649.209288] env[63379]: DEBUG nova.virt.hardware [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1649.209471] env[63379]: DEBUG nova.virt.hardware [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1649.209653] env[63379]: DEBUG nova.virt.hardware [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1649.209816] env[63379]: DEBUG nova.virt.hardware [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1649.209992] env[63379]: DEBUG nova.virt.hardware [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1649.210180] env[63379]: DEBUG nova.virt.hardware [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1649.210361] env[63379]: DEBUG nova.virt.hardware [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1649.211425] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e62d5db6-b46c-4118-99c2-638a115c0a8e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.216816] env[63379]: DEBUG oslo_vmware.api [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Waiting for the task: (returnval){ [ 1649.216816] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d2927a-8c49-09d8-1d49-73bbca9312af" [ 1649.216816] env[63379]: _type = "Task" [ 1649.216816] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1649.226167] env[63379]: DEBUG oslo_vmware.api [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d2927a-8c49-09d8-1d49-73bbca9312af, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.470643] env[63379]: INFO nova.compute.manager [-] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Took 1.02 seconds to deallocate network for instance. 
[ 1649.627129] env[63379]: DEBUG oslo_concurrency.lockutils [None req-da0de8f7-60c8-45b1-8015-e7c09fd22723 tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Lock "5aad86f8-0b3b-43ca-982b-c670e3411c01" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.252s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1649.650096] env[63379]: DEBUG oslo_concurrency.lockutils [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1649.656214] env[63379]: DEBUG nova.scheduler.client.report [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1649.712877] env[63379]: DEBUG nova.compute.manager [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1649.729082] env[63379]: DEBUG oslo_vmware.api [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d2927a-8c49-09d8-1d49-73bbca9312af, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.737680] env[63379]: DEBUG nova.virt.hardware [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1649.738021] env[63379]: DEBUG nova.virt.hardware [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1649.738277] env[63379]: DEBUG nova.virt.hardware [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1649.738569] env[63379]: DEBUG nova.virt.hardware [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1649.738805] env[63379]: DEBUG nova.virt.hardware [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1649.739059] env[63379]: DEBUG nova.virt.hardware [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1649.739381] env[63379]: DEBUG nova.virt.hardware [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1649.739638] env[63379]: DEBUG nova.virt.hardware [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
1649.739898] env[63379]: DEBUG nova.virt.hardware [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1649.740179] env[63379]: DEBUG nova.virt.hardware [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1649.740460] env[63379]: DEBUG nova.virt.hardware [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1649.742031] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab3da1fd-f109-429a-bad1-cd257a854c6f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.751141] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c8f9df9-18ff-44c2-aeb3-bc69f8e467f8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.974786] env[63379]: DEBUG nova.compute.manager [req-9e2181a4-58a3-4833-9eec-33a95b81d681 req-5d926d46-4e00-425f-8c70-cd34663a92ed service nova] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Received event network-vif-plugged-eb04ffa3-5012-4114-8150-3bc9329f9328 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1649.974786] env[63379]: DEBUG oslo_concurrency.lockutils [req-9e2181a4-58a3-4833-9eec-33a95b81d681 req-5d926d46-4e00-425f-8c70-cd34663a92ed service nova] Acquiring lock "36681a38-7cfd-44cf-8b8f-1f4dfb613f4f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1649.974786] env[63379]: DEBUG oslo_concurrency.lockutils [req-9e2181a4-58a3-4833-9eec-33a95b81d681 req-5d926d46-4e00-425f-8c70-cd34663a92ed service nova] Lock "36681a38-7cfd-44cf-8b8f-1f4dfb613f4f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1649.974786] env[63379]: DEBUG oslo_concurrency.lockutils [req-9e2181a4-58a3-4833-9eec-33a95b81d681 req-5d926d46-4e00-425f-8c70-cd34663a92ed service nova] Lock "36681a38-7cfd-44cf-8b8f-1f4dfb613f4f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1649.974974] env[63379]: DEBUG nova.compute.manager [req-9e2181a4-58a3-4833-9eec-33a95b81d681 req-5d926d46-4e00-425f-8c70-cd34663a92ed service nova] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] No waiting events found dispatching network-vif-plugged-eb04ffa3-5012-4114-8150-3bc9329f9328 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1649.975163] 
env[63379]: WARNING nova.compute.manager [req-9e2181a4-58a3-4833-9eec-33a95b81d681 req-5d926d46-4e00-425f-8c70-cd34663a92ed service nova] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Received unexpected event network-vif-plugged-eb04ffa3-5012-4114-8150-3bc9329f9328 for instance with vm_state building and task_state spawning. [ 1649.976406] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8452693d-f0e2-4485-8ee9-46a4f6490094 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1650.160565] env[63379]: DEBUG oslo_concurrency.lockutils [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.492s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1650.161100] env[63379]: DEBUG nova.compute.manager [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1650.163696] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f33e9264-22c2-494a-97d8-891c2fcad412 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.181s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1650.165356] env[63379]: DEBUG nova.objects.instance [None req-f33e9264-22c2-494a-97d8-891c2fcad412 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Lazy-loading 'resources' on Instance uuid 04234ba7-24a3-48e5-9f62-6f4dddd0054a {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1650.231740] env[63379]: DEBUG oslo_vmware.api [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d2927a-8c49-09d8-1d49-73bbca9312af, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.293754] env[63379]: DEBUG nova.network.neutron [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Successfully updated port: eb04ffa3-5012-4114-8150-3bc9329f9328 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1650.296169] env[63379]: INFO nova.compute.manager [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Rebuilding instance [ 1650.344725] env[63379]: DEBUG nova.compute.manager [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1650.345886] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-983a230c-4235-4c80-ac40-c80d4d3cfb0f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.469755] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e82d24e3-5aa1-4854-a652-599af3f01d0c tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Acquiring lock "5aad86f8-0b3b-43ca-982b-c670e3411c01" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1650.469755] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e82d24e3-5aa1-4854-a652-599af3f01d0c tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Lock "5aad86f8-0b3b-43ca-982b-c670e3411c01" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1650.469755] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e82d24e3-5aa1-4854-a652-599af3f01d0c tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Acquiring lock "5aad86f8-0b3b-43ca-982b-c670e3411c01-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1650.469934] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e82d24e3-5aa1-4854-a652-599af3f01d0c tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Lock "5aad86f8-0b3b-43ca-982b-c670e3411c01-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1650.470084] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e82d24e3-5aa1-4854-a652-599af3f01d0c tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Lock "5aad86f8-0b3b-43ca-982b-c670e3411c01-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1650.472630] env[63379]: INFO nova.compute.manager [None req-e82d24e3-5aa1-4854-a652-599af3f01d0c tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Terminating instance [ 1650.474378] env[63379]: DEBUG nova.compute.manager [None req-e82d24e3-5aa1-4854-a652-599af3f01d0c tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1650.474589] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e82d24e3-5aa1-4854-a652-599af3f01d0c tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1650.475452] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9760f1d-004b-43dc-abd3-611fdddf36b2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.482695] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e82d24e3-5aa1-4854-a652-599af3f01d0c tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1650.482925] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-187a049a-2196-49b6-a3b8-a9ffe2ef184d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.489725] env[63379]: DEBUG oslo_vmware.api [None req-e82d24e3-5aa1-4854-a652-599af3f01d0c tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Waiting for the task: (returnval){ [ 1650.489725] env[63379]: value = "task-1779671" [ 1650.489725] env[63379]: _type = "Task" [ 1650.489725] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1650.497816] env[63379]: DEBUG oslo_vmware.api [None req-e82d24e3-5aa1-4854-a652-599af3f01d0c tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Task: {'id': task-1779671, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.667773] env[63379]: DEBUG nova.compute.utils [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1650.672041] env[63379]: DEBUG nova.compute.manager [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1650.672136] env[63379]: DEBUG nova.network.neutron [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1650.726372] env[63379]: DEBUG nova.policy [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '17d08e0b690e41059dd859cab218dab7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b124411aac0544d6834ff8f5c2b84bd5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1650.733433] env[63379]: DEBUG oslo_vmware.api [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d2927a-8c49-09d8-1d49-73bbca9312af, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.798178] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Acquiring lock "refresh_cache-36681a38-7cfd-44cf-8b8f-1f4dfb613f4f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1650.798360] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Acquired lock "refresh_cache-36681a38-7cfd-44cf-8b8f-1f4dfb613f4f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1650.798532] env[63379]: DEBUG nova.network.neutron [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1650.857052] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1650.857378] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-af0d2c44-5d3b-41c7-a903-c05270e0906f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.870179] env[63379]: DEBUG oslo_vmware.api [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 
tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1650.870179] env[63379]: value = "task-1779672" [ 1650.870179] env[63379]: _type = "Task" [ 1650.870179] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1650.884894] env[63379]: DEBUG oslo_vmware.api [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779672, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.002713] env[63379]: DEBUG oslo_vmware.api [None req-e82d24e3-5aa1-4854-a652-599af3f01d0c tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Task: {'id': task-1779671, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.015806] env[63379]: DEBUG nova.network.neutron [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Successfully created port: ae27d114-783b-4d6d-89ea-22959da9b86f {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1651.065655] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1712c7d9-242c-476a-b0b7-7497b7f8ac22 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.073907] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cf1771e-e236-4c5d-be2f-7c65e623d05d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.106399] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe5d7448-8932-482e-820d-80eb7763d9e4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.112670] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28d95c84-fd9c-48f2-acb6-02617f2a501e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.127721] env[63379]: DEBUG nova.compute.provider_tree [None req-f33e9264-22c2-494a-97d8-891c2fcad412 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1651.172164] env[63379]: DEBUG nova.compute.manager [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Start building block device mappings for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1651.229216] env[63379]: DEBUG oslo_vmware.api [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d2927a-8c49-09d8-1d49-73bbca9312af, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.338997] env[63379]: DEBUG nova.network.neutron [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1651.384526] env[63379]: DEBUG oslo_vmware.api [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779672, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.508268] env[63379]: DEBUG oslo_vmware.api [None req-e82d24e3-5aa1-4854-a652-599af3f01d0c tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Task: {'id': task-1779671, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.539692] env[63379]: DEBUG nova.network.neutron [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Updating instance_info_cache with network_info: [{"id": "eb04ffa3-5012-4114-8150-3bc9329f9328", "address": "fa:16:3e:21:3d:37", "network": {"id": "42ec9777-27c5-4516-be87-12d549df72cd", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1493935153-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86e655baa29c4c88b8648d273f92ed4b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb04ffa3-50", "ovs_interfaceid": "eb04ffa3-5012-4114-8150-3bc9329f9328", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1651.632935] env[63379]: DEBUG nova.scheduler.client.report [None req-f33e9264-22c2-494a-97d8-891c2fcad412 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 
'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1651.729259] env[63379]: DEBUG oslo_vmware.api [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d2927a-8c49-09d8-1d49-73bbca9312af, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.884067] env[63379]: DEBUG oslo_vmware.api [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779672, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.999641] env[63379]: DEBUG nova.compute.manager [req-c7c6ce0e-4718-48a4-baad-fdefad0fa036 req-ed020ae3-9b18-4239-8fdf-5a2aa29cb6a9 service nova] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Received event network-changed-eb04ffa3-5012-4114-8150-3bc9329f9328 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1651.999869] env[63379]: DEBUG nova.compute.manager [req-c7c6ce0e-4718-48a4-baad-fdefad0fa036 req-ed020ae3-9b18-4239-8fdf-5a2aa29cb6a9 service nova] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Refreshing instance network info cache due to event network-changed-eb04ffa3-5012-4114-8150-3bc9329f9328. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1652.000121] env[63379]: DEBUG oslo_concurrency.lockutils [req-c7c6ce0e-4718-48a4-baad-fdefad0fa036 req-ed020ae3-9b18-4239-8fdf-5a2aa29cb6a9 service nova] Acquiring lock "refresh_cache-36681a38-7cfd-44cf-8b8f-1f4dfb613f4f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1652.006521] env[63379]: DEBUG oslo_vmware.api [None req-e82d24e3-5aa1-4854-a652-599af3f01d0c tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Task: {'id': task-1779671, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.042063] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Releasing lock "refresh_cache-36681a38-7cfd-44cf-8b8f-1f4dfb613f4f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1652.042387] env[63379]: DEBUG nova.compute.manager [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Instance network_info: |[{"id": "eb04ffa3-5012-4114-8150-3bc9329f9328", "address": "fa:16:3e:21:3d:37", "network": {"id": "42ec9777-27c5-4516-be87-12d549df72cd", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1493935153-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86e655baa29c4c88b8648d273f92ed4b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb04ffa3-50", "ovs_interfaceid": "eb04ffa3-5012-4114-8150-3bc9329f9328", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1652.042666] env[63379]: DEBUG oslo_concurrency.lockutils [req-c7c6ce0e-4718-48a4-baad-fdefad0fa036 req-ed020ae3-9b18-4239-8fdf-5a2aa29cb6a9 service nova] Acquired lock "refresh_cache-36681a38-7cfd-44cf-8b8f-1f4dfb613f4f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1652.042846] env[63379]: DEBUG nova.network.neutron [req-c7c6ce0e-4718-48a4-baad-fdefad0fa036 req-ed020ae3-9b18-4239-8fdf-5a2aa29cb6a9 service nova] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Refreshing network info cache for port eb04ffa3-5012-4114-8150-3bc9329f9328 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1652.046019] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:21:3d:37', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f8442aa5-73db-4599-8564-b98a6ea26b9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eb04ffa3-5012-4114-8150-3bc9329f9328', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1652.052971] env[63379]: DEBUG oslo.service.loopingcall [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 
tempest-VolumesAdminNegativeTest-1899328954-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1652.055634] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1652.056135] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-561617cb-3917-4d4e-8196-ea8f558e4084 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.078809] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1652.078809] env[63379]: value = "task-1779673" [ 1652.078809] env[63379]: _type = "Task" [ 1652.078809] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1652.086494] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779673, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.137495] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f33e9264-22c2-494a-97d8-891c2fcad412 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.974s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1652.139622] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.231s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1652.141413] env[63379]: INFO nova.compute.claims [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1652.160855] env[63379]: INFO nova.scheduler.client.report [None req-f33e9264-22c2-494a-97d8-891c2fcad412 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Deleted allocations for instance 04234ba7-24a3-48e5-9f62-6f4dddd0054a [ 1652.181057] env[63379]: DEBUG nova.compute.manager [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1652.208852] env[63379]: DEBUG nova.virt.hardware [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1652.209142] env[63379]: DEBUG nova.virt.hardware [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1652.209313] env[63379]: DEBUG nova.virt.hardware [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1652.209536] env[63379]: DEBUG nova.virt.hardware [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1652.209686] env[63379]: DEBUG nova.virt.hardware [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1652.209824] env[63379]: DEBUG nova.virt.hardware [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1652.210050] env[63379]: DEBUG nova.virt.hardware [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1652.210218] env[63379]: DEBUG nova.virt.hardware [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1652.210386] env[63379]: DEBUG nova.virt.hardware [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1652.210550] env[63379]: DEBUG nova.virt.hardware [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1652.210727] env[63379]: DEBUG nova.virt.hardware [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1652.211619] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92c8916b-ea30-4e0e-9f53-e09b02820fd5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.221050] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9df20b3-428d-434e-acfa-fd9ce49e2902 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.239998] env[63379]: DEBUG oslo_vmware.api [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d2927a-8c49-09d8-1d49-73bbca9312af, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.286094] env[63379]: DEBUG nova.network.neutron [req-c7c6ce0e-4718-48a4-baad-fdefad0fa036 req-ed020ae3-9b18-4239-8fdf-5a2aa29cb6a9 service nova] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Updated VIF entry in instance network info cache for port eb04ffa3-5012-4114-8150-3bc9329f9328. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1652.286481] env[63379]: DEBUG nova.network.neutron [req-c7c6ce0e-4718-48a4-baad-fdefad0fa036 req-ed020ae3-9b18-4239-8fdf-5a2aa29cb6a9 service nova] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Updating instance_info_cache with network_info: [{"id": "eb04ffa3-5012-4114-8150-3bc9329f9328", "address": "fa:16:3e:21:3d:37", "network": {"id": "42ec9777-27c5-4516-be87-12d549df72cd", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1493935153-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86e655baa29c4c88b8648d273f92ed4b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f8442aa5-73db-4599-8564-b98a6ea26b9b", "external-id": "nsx-vlan-transportzone-893", "segmentation_id": 893, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb04ffa3-50", "ovs_interfaceid": "eb04ffa3-5012-4114-8150-3bc9329f9328", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1652.384309] env[63379]: DEBUG oslo_vmware.api [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779672, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.505608] env[63379]: DEBUG oslo_vmware.api [None req-e82d24e3-5aa1-4854-a652-599af3f01d0c tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Task: {'id': task-1779671, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.592020] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779673, 'name': CreateVM_Task, 'duration_secs': 0.3345} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1652.592020] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1652.592020] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1652.592020] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1652.592020] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1652.592020] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef7a2f8f-d341-41a3-a7ee-f2dc8253b0a2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.596258] env[63379]: DEBUG oslo_vmware.api [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Waiting for the task: (returnval){ [ 1652.596258] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c844fb-dac7-5023-7ce2-97aaebc7189f" [ 1652.596258] env[63379]: _type = "Task" [ 1652.596258] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1652.605621] env[63379]: DEBUG oslo_vmware.api [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c844fb-dac7-5023-7ce2-97aaebc7189f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.636588] env[63379]: DEBUG nova.network.neutron [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Successfully updated port: ae27d114-783b-4d6d-89ea-22959da9b86f {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1652.669388] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f33e9264-22c2-494a-97d8-891c2fcad412 tempest-SecurityGroupsTestJSON-934629976 tempest-SecurityGroupsTestJSON-934629976-project-member] Lock "04234ba7-24a3-48e5-9f62-6f4dddd0054a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.180s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1652.734949] env[63379]: DEBUG oslo_vmware.api [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d2927a-8c49-09d8-1d49-73bbca9312af, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.789706] env[63379]: DEBUG oslo_concurrency.lockutils [req-c7c6ce0e-4718-48a4-baad-fdefad0fa036 req-ed020ae3-9b18-4239-8fdf-5a2aa29cb6a9 service nova] Releasing lock "refresh_cache-36681a38-7cfd-44cf-8b8f-1f4dfb613f4f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1652.885711] env[63379]: DEBUG oslo_vmware.api [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779672, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.008387] env[63379]: DEBUG oslo_vmware.api [None req-e82d24e3-5aa1-4854-a652-599af3f01d0c tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Task: {'id': task-1779671, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.111023] env[63379]: DEBUG oslo_vmware.api [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c844fb-dac7-5023-7ce2-97aaebc7189f, 'name': SearchDatastore_Task, 'duration_secs': 0.314175} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1653.111023] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1653.111023] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1653.111023] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1653.111023] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1653.111023] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1653.111023] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7ad5b0a1-266f-45ab-a6b7-83eb5206bbe1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.118176] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1653.118550] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1653.119381] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23467d6f-9603-465b-9bad-ec9e578d31b7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.124522] env[63379]: DEBUG oslo_vmware.api [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Waiting for the task: (returnval){ [ 1653.124522] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a11672-14fa-cc8f-76b9-bcbe0243a0e7" [ 1653.124522] env[63379]: _type = "Task" [ 1653.124522] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1653.132363] env[63379]: DEBUG oslo_vmware.api [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a11672-14fa-cc8f-76b9-bcbe0243a0e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.140095] env[63379]: DEBUG oslo_concurrency.lockutils [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Acquiring lock "refresh_cache-f087b3ac-13e2-4e55-a3ce-5e6bd3379239" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1653.140385] env[63379]: DEBUG oslo_concurrency.lockutils [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Acquired lock "refresh_cache-f087b3ac-13e2-4e55-a3ce-5e6bd3379239" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1653.140632] env[63379]: DEBUG nova.network.neutron [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1653.239616] env[63379]: DEBUG oslo_vmware.api [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d2927a-8c49-09d8-1d49-73bbca9312af, 'name': SearchDatastore_Task, 'duration_secs': 3.688704} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1653.249161] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Reconfiguring VM instance instance-00000033 to detach disk 2000 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1653.249655] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fd2f07f2-3464-417e-9629-5865e300cae7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.271044] env[63379]: DEBUG oslo_vmware.api [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Waiting for the task: (returnval){ [ 1653.271044] env[63379]: value = "task-1779674" [ 1653.271044] env[63379]: _type = "Task" [ 1653.271044] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1653.281250] env[63379]: DEBUG oslo_vmware.api [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779674, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.385939] env[63379]: DEBUG oslo_vmware.api [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779672, 'name': PowerOffVM_Task, 'duration_secs': 2.250877} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1653.388486] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1653.388718] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1653.390145] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c32e449-4728-4fb1-b1a1-145a89ffb119 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.397196] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1653.397196] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bb7d69af-a3f6-4963-b089-2b285e86fe05 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.493946] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1653.494211] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1653.494397] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Deleting the datastore file [datastore1] 758ade2c-7f75-4907-95d5-681d5792ae31 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1653.494737] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b40b4fcc-3b72-4898-b08d-6d43daabaa5f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.504132] env[63379]: DEBUG oslo_vmware.api [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1653.504132] env[63379]: value = "task-1779676" [ 1653.504132] env[63379]: _type = "Task" [ 1653.504132] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1653.510278] env[63379]: DEBUG oslo_vmware.api [None req-e82d24e3-5aa1-4854-a652-599af3f01d0c tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Task: {'id': task-1779671, 'name': PowerOffVM_Task, 'duration_secs': 2.626552} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1653.510895] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e82d24e3-5aa1-4854-a652-599af3f01d0c tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1653.511089] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e82d24e3-5aa1-4854-a652-599af3f01d0c tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1653.511335] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-72d4abd0-2a33-4035-b839-6b7d0a613ba7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.515407] env[63379]: DEBUG oslo_vmware.api [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779676, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.577032] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cd719e6-33bb-4c01-81d9-4f63a78dae9e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.588171] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90825a5b-ae96-4c1e-9953-539c5ac4af17 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.591607] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e82d24e3-5aa1-4854-a652-599af3f01d0c tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1653.591845] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e82d24e3-5aa1-4854-a652-599af3f01d0c tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1653.592063] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-e82d24e3-5aa1-4854-a652-599af3f01d0c tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Deleting the datastore file [datastore1] 5aad86f8-0b3b-43ca-982b-c670e3411c01 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1653.592329] env[63379]: DEBUG 
oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3b537a36-4960-4bed-a733-ea7b65ac6dff {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.622302] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cafbf7ad-743c-43df-a7d9-968fd5c1c655 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.625437] env[63379]: DEBUG oslo_vmware.api [None req-e82d24e3-5aa1-4854-a652-599af3f01d0c tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Waiting for the task: (returnval){ [ 1653.625437] env[63379]: value = "task-1779678" [ 1653.625437] env[63379]: _type = "Task" [ 1653.625437] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1653.634782] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80e60dd9-46e2-4187-a82f-53c58ac79bdf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.644289] env[63379]: DEBUG oslo_vmware.api [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a11672-14fa-cc8f-76b9-bcbe0243a0e7, 'name': SearchDatastore_Task, 'duration_secs': 0.007907} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1653.646250] env[63379]: DEBUG oslo_vmware.api [None req-e82d24e3-5aa1-4854-a652-599af3f01d0c tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Task: {'id': task-1779678, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.647401] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-707d1f0b-4496-49f8-bd4f-25a792f43b5d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.659018] env[63379]: DEBUG nova.compute.provider_tree [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1653.664718] env[63379]: DEBUG oslo_vmware.api [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Waiting for the task: (returnval){ [ 1653.664718] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5218a234-c011-6487-9f50-4998c9c82716" [ 1653.664718] env[63379]: _type = "Task" [ 1653.664718] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1653.672523] env[63379]: DEBUG oslo_vmware.api [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5218a234-c011-6487-9f50-4998c9c82716, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.691495] env[63379]: DEBUG nova.network.neutron [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1653.781064] env[63379]: DEBUG oslo_vmware.api [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779674, 'name': ReconfigVM_Task, 'duration_secs': 0.174005} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1653.781560] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Reconfigured VM instance instance-00000033 to detach disk 2000 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1653.782145] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a43f4d24-6649-44fd-bffe-a7c06d122e84 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.803798] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] f082cdd7-228e-4100-b301-5af6daea9b36/f082cdd7-228e-4100-b301-5af6daea9b36.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1653.804121] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9e78cf9c-adb6-4de9-b263-7663f2905d6f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.825125] env[63379]: DEBUG oslo_vmware.api [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Waiting for the task: (returnval){ [ 1653.825125] env[63379]: value = "task-1779679" [ 1653.825125] env[63379]: _type = "Task" [ 1653.825125] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1653.833235] env[63379]: DEBUG oslo_vmware.api [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779679, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.857294] env[63379]: DEBUG nova.network.neutron [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Updating instance_info_cache with network_info: [{"id": "ae27d114-783b-4d6d-89ea-22959da9b86f", "address": "fa:16:3e:2a:2d:02", "network": {"id": "f746cc0e-3c0e-4c9c-b2fc-2e87ec1838e1", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1847805430-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "b124411aac0544d6834ff8f5c2b84bd5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "748a5204-8f14-402c-9a6e-f3e6104db082", "external-id": "nsx-vlan-transportzone-750", "segmentation_id": 750, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae27d114-78", "ovs_interfaceid": "ae27d114-783b-4d6d-89ea-22959da9b86f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1654.015016] env[63379]: DEBUG oslo_vmware.api [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779676, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.159706} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1654.015325] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1654.015522] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1654.015709] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1654.034786] env[63379]: DEBUG nova.compute.manager [req-ab5852cc-75b5-48ab-8a19-4c4f424700a3 req-fbace72d-6453-4c69-a645-f139b1196ce6 service nova] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Received event network-vif-plugged-ae27d114-783b-4d6d-89ea-22959da9b86f {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1654.035089] env[63379]: DEBUG oslo_concurrency.lockutils [req-ab5852cc-75b5-48ab-8a19-4c4f424700a3 req-fbace72d-6453-4c69-a645-f139b1196ce6 service nova] Acquiring lock "f087b3ac-13e2-4e55-a3ce-5e6bd3379239-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1654.035287] env[63379]: DEBUG oslo_concurrency.lockutils [req-ab5852cc-75b5-48ab-8a19-4c4f424700a3 req-fbace72d-6453-4c69-a645-f139b1196ce6 service nova] Lock "f087b3ac-13e2-4e55-a3ce-5e6bd3379239-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1654.035403] env[63379]: DEBUG oslo_concurrency.lockutils [req-ab5852cc-75b5-48ab-8a19-4c4f424700a3 req-fbace72d-6453-4c69-a645-f139b1196ce6 service nova] Lock "f087b3ac-13e2-4e55-a3ce-5e6bd3379239-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1654.035579] env[63379]: DEBUG nova.compute.manager [req-ab5852cc-75b5-48ab-8a19-4c4f424700a3 req-fbace72d-6453-4c69-a645-f139b1196ce6 service nova] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] No waiting events found dispatching network-vif-plugged-ae27d114-783b-4d6d-89ea-22959da9b86f {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1654.035763] env[63379]: WARNING nova.compute.manager [req-ab5852cc-75b5-48ab-8a19-4c4f424700a3 req-fbace72d-6453-4c69-a645-f139b1196ce6 service nova] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Received unexpected event network-vif-plugged-ae27d114-783b-4d6d-89ea-22959da9b86f for instance with vm_state building and task_state spawning. 
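[editor's annotation, not part of the original log] The recurring "Waiting for the task: (returnval){ ... } to complete" lines followed by repeated "progress is N%." entries and a final "completed successfully" entry come from oslo.vmware's task polling (wait_for_task at api.py:397, _poll_task at api.py:434/444). The sketch below is a simplified, hypothetical approximation of that poll loop for readers following the log; it is not the oslo.vmware implementation, and the fetch_task_info callable, poll_interval, and timeout values are illustrative assumptions only.

import time

def wait_for_task(fetch_task_info, poll_interval=0.5, timeout=300.0):
    """Poll a vCenter-style task until it finishes (illustrative sketch).

    fetch_task_info is assumed to return a dict such as
    {'name': 'PowerOffVM_Task', 'state': 'running', 'progress': 0},
    loosely mirroring the Task objects referenced in the log
    (task-1779672, task-1779673, ...).
    """
    deadline = time.monotonic() + timeout
    while True:
        info = fetch_task_info()
        if info['state'] == 'success':
            # Corresponds to the "... completed successfully." log entries.
            return info
        if info['state'] == 'error':
            raise RuntimeError('task failed: %s' % info.get('error'))
        if time.monotonic() > deadline:
            raise TimeoutError('task did not complete in %.0fs' % timeout)
        # Corresponds to the repeated "progress is N%." poll entries.
        time.sleep(poll_interval)

In this section the pattern is visible end to end: for example, task-1779672 (PowerOffVM_Task) is polled at roughly one-second intervals until it reports completion with duration_secs 2.250877.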
[ 1654.035970] env[63379]: DEBUG nova.compute.manager [req-ab5852cc-75b5-48ab-8a19-4c4f424700a3 req-fbace72d-6453-4c69-a645-f139b1196ce6 service nova] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Received event network-changed-ae27d114-783b-4d6d-89ea-22959da9b86f {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1654.036193] env[63379]: DEBUG nova.compute.manager [req-ab5852cc-75b5-48ab-8a19-4c4f424700a3 req-fbace72d-6453-4c69-a645-f139b1196ce6 service nova] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Refreshing instance network info cache due to event network-changed-ae27d114-783b-4d6d-89ea-22959da9b86f. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1654.036391] env[63379]: DEBUG oslo_concurrency.lockutils [req-ab5852cc-75b5-48ab-8a19-4c4f424700a3 req-fbace72d-6453-4c69-a645-f139b1196ce6 service nova] Acquiring lock "refresh_cache-f087b3ac-13e2-4e55-a3ce-5e6bd3379239" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1654.134633] env[63379]: DEBUG oslo_vmware.api [None req-e82d24e3-5aa1-4854-a652-599af3f01d0c tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Task: {'id': task-1779678, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.158745} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1654.134903] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-e82d24e3-5aa1-4854-a652-599af3f01d0c tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1654.136295] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e82d24e3-5aa1-4854-a652-599af3f01d0c tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1654.136295] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e82d24e3-5aa1-4854-a652-599af3f01d0c tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1654.136295] env[63379]: INFO nova.compute.manager [None req-e82d24e3-5aa1-4854-a652-599af3f01d0c tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Took 3.66 seconds to destroy the instance on the hypervisor. [ 1654.136295] env[63379]: DEBUG oslo.service.loopingcall [None req-e82d24e3-5aa1-4854-a652-599af3f01d0c tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1654.136295] env[63379]: DEBUG nova.compute.manager [-] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1654.136513] env[63379]: DEBUG nova.network.neutron [-] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1654.163055] env[63379]: DEBUG nova.scheduler.client.report [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1654.181317] env[63379]: DEBUG oslo_vmware.api [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5218a234-c011-6487-9f50-4998c9c82716, 'name': SearchDatastore_Task, 'duration_secs': 0.010211} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1654.182165] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1654.182434] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f/36681a38-7cfd-44cf-8b8f-1f4dfb613f4f.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1654.182693] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3a8ec282-3bff-47b7-9b1c-82e5ebf15856 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.191788] env[63379]: DEBUG oslo_vmware.api [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Waiting for the task: (returnval){ [ 1654.191788] env[63379]: value = "task-1779680" [ 1654.191788] env[63379]: _type = "Task" [ 1654.191788] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1654.200483] env[63379]: DEBUG oslo_vmware.api [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': task-1779680, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1654.336333] env[63379]: DEBUG oslo_vmware.api [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779679, 'name': ReconfigVM_Task, 'duration_secs': 0.30462} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1654.336333] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Reconfigured VM instance instance-00000033 to attach disk [datastore1] f082cdd7-228e-4100-b301-5af6daea9b36/f082cdd7-228e-4100-b301-5af6daea9b36.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1654.337695] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b52afb33-3c55-49bd-bfc6-57afb4d811a7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.359789] env[63379]: DEBUG oslo_concurrency.lockutils [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Releasing lock "refresh_cache-f087b3ac-13e2-4e55-a3ce-5e6bd3379239" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1654.359931] env[63379]: DEBUG nova.compute.manager [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Instance network_info: |[{"id": "ae27d114-783b-4d6d-89ea-22959da9b86f", "address": "fa:16:3e:2a:2d:02", "network": {"id": "f746cc0e-3c0e-4c9c-b2fc-2e87ec1838e1", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1847805430-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "b124411aac0544d6834ff8f5c2b84bd5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "748a5204-8f14-402c-9a6e-f3e6104db082", "external-id": "nsx-vlan-transportzone-750", "segmentation_id": 750, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae27d114-78", "ovs_interfaceid": "ae27d114-783b-4d6d-89ea-22959da9b86f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1997}} [ 1654.362817] env[63379]: DEBUG oslo_concurrency.lockutils [req-ab5852cc-75b5-48ab-8a19-4c4f424700a3 req-fbace72d-6453-4c69-a645-f139b1196ce6 service nova] Acquired lock "refresh_cache-f087b3ac-13e2-4e55-a3ce-5e6bd3379239" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1654.362817] env[63379]: DEBUG nova.network.neutron [req-ab5852cc-75b5-48ab-8a19-4c4f424700a3 req-fbace72d-6453-4c69-a645-f139b1196ce6 service nova] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Refreshing network info cache for port ae27d114-783b-4d6d-89ea-22959da9b86f {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1654.364924] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2a:2d:02', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '748a5204-8f14-402c-9a6e-f3e6104db082', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ae27d114-783b-4d6d-89ea-22959da9b86f', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1654.375290] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Creating folder: Project (b124411aac0544d6834ff8f5c2b84bd5). Parent ref: group-v369214. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1654.375290] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f18fedb9-b5d9-477b-be51-5bda4d84d9e3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.381464] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c259c0a9-1d27-440d-b453-28af842cc3bf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.406467] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9378918a-448f-46c9-99c1-1fa2279e4852 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.409882] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Created folder: Project (b124411aac0544d6834ff8f5c2b84bd5) in parent group-v369214. [ 1654.410335] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Creating folder: Instances. Parent ref: group-v369406. 
{{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1654.410719] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8e06b77a-86a7-406f-8722-03ed37b74410 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.438278] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c6b20ef-83e0-4cfa-a250-025fc904f45c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.441373] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Created folder: Instances in parent group-v369406. [ 1654.441462] env[63379]: DEBUG oslo.service.loopingcall [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1654.442065] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1654.442941] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-326246e1-d9d2-415c-81c9-802fd2a4358a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.472031] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1654.472864] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3364f451-4489-4509-9a90-e6c0bb35446f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.478450] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1654.478450] env[63379]: value = "task-1779683" [ 1654.478450] env[63379]: _type = "Task" [ 1654.478450] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1654.479883] env[63379]: DEBUG oslo_vmware.api [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Waiting for the task: (returnval){ [ 1654.479883] env[63379]: value = "task-1779684" [ 1654.479883] env[63379]: _type = "Task" [ 1654.479883] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1654.493871] env[63379]: DEBUG oslo_vmware.api [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779684, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1654.494165] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779683, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1654.671943] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.532s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1654.672510] env[63379]: DEBUG nova.compute.manager [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1654.675686] env[63379]: DEBUG oslo_concurrency.lockutils [None req-36a06ee9-ab71-4c32-b967-60f4dbe43cce tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.580s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1654.676087] env[63379]: DEBUG nova.objects.instance [None req-36a06ee9-ab71-4c32-b967-60f4dbe43cce tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lazy-loading 'resources' on Instance uuid 8b07ef47-3615-41a5-acfd-87c1ad43b4b9 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1654.683817] env[63379]: DEBUG nova.network.neutron [req-ab5852cc-75b5-48ab-8a19-4c4f424700a3 req-fbace72d-6453-4c69-a645-f139b1196ce6 service nova] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Updated VIF entry in instance network info cache for port ae27d114-783b-4d6d-89ea-22959da9b86f. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1654.684029] env[63379]: DEBUG nova.network.neutron [req-ab5852cc-75b5-48ab-8a19-4c4f424700a3 req-fbace72d-6453-4c69-a645-f139b1196ce6 service nova] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Updating instance_info_cache with network_info: [{"id": "ae27d114-783b-4d6d-89ea-22959da9b86f", "address": "fa:16:3e:2a:2d:02", "network": {"id": "f746cc0e-3c0e-4c9c-b2fc-2e87ec1838e1", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1847805430-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "b124411aac0544d6834ff8f5c2b84bd5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "748a5204-8f14-402c-9a6e-f3e6104db082", "external-id": "nsx-vlan-transportzone-750", "segmentation_id": 750, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae27d114-78", "ovs_interfaceid": "ae27d114-783b-4d6d-89ea-22959da9b86f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1654.701172] env[63379]: DEBUG oslo_vmware.api [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': task-1779680, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1654.888447] env[63379]: DEBUG nova.network.neutron [-] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1654.993162] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779683, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1654.996912] env[63379]: DEBUG oslo_vmware.api [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779684, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1655.059613] env[63379]: DEBUG nova.virt.hardware [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1655.059899] env[63379]: DEBUG nova.virt.hardware [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1655.060077] env[63379]: DEBUG nova.virt.hardware [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1655.060279] env[63379]: DEBUG nova.virt.hardware [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1655.060429] env[63379]: DEBUG nova.virt.hardware [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1655.060582] env[63379]: DEBUG nova.virt.hardware [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1655.060795] env[63379]: DEBUG nova.virt.hardware [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1655.060958] env[63379]: DEBUG nova.virt.hardware [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1655.061187] env[63379]: DEBUG nova.virt.hardware [None 
req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1655.061371] env[63379]: DEBUG nova.virt.hardware [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1655.061552] env[63379]: DEBUG nova.virt.hardware [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1655.062451] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a353d56-78a1-4710-826f-16c8a928e47f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.071359] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f8ce2ec-b0df-4ced-a3e8-79f34a585c4c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.085929] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bf:be:c8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e445fb59-822c-4d7d-943b-c8e3bbaca62e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '021a6cdc-585b-40dc-a330-d328102cf80c', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1655.093822] env[63379]: DEBUG oslo.service.loopingcall [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1655.094096] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1655.094324] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d98d2478-0699-4fcb-90a1-19180cdf926c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.112576] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1655.112576] env[63379]: value = "task-1779685" [ 1655.112576] env[63379]: _type = "Task" [ 1655.112576] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1655.120255] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779685, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1655.179690] env[63379]: DEBUG nova.compute.utils [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1655.181150] env[63379]: DEBUG nova.compute.manager [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1655.181324] env[63379]: DEBUG nova.network.neutron [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1655.186326] env[63379]: DEBUG oslo_concurrency.lockutils [req-ab5852cc-75b5-48ab-8a19-4c4f424700a3 req-fbace72d-6453-4c69-a645-f139b1196ce6 service nova] Releasing lock "refresh_cache-f087b3ac-13e2-4e55-a3ce-5e6bd3379239" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1655.201849] env[63379]: DEBUG oslo_vmware.api [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': task-1779680, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.790233} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1655.202196] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f/36681a38-7cfd-44cf-8b8f-1f4dfb613f4f.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1655.202415] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1655.202727] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-49d42cf9-2edd-425c-b1ce-491f07a5a6bc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.211647] env[63379]: DEBUG oslo_vmware.api [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Waiting for the task: (returnval){ [ 1655.211647] env[63379]: value = "task-1779686" [ 1655.211647] env[63379]: _type = "Task" [ 1655.211647] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1655.220711] env[63379]: DEBUG oslo_vmware.api [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': task-1779686, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1655.235488] env[63379]: DEBUG nova.policy [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '756ff556130a4855b461899fece1e1fa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a3363a90de2d4d5988ddd03974c10d0a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1655.392541] env[63379]: INFO nova.compute.manager [-] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Took 1.26 seconds to deallocate network for instance. [ 1655.495438] env[63379]: DEBUG oslo_vmware.api [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779684, 'name': PowerOnVM_Task, 'duration_secs': 0.788506} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1655.501253] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1655.505183] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779683, 'name': CreateVM_Task, 'duration_secs': 0.798225} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1655.506502] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1655.506770] env[63379]: DEBUG oslo_concurrency.lockutils [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1655.506934] env[63379]: DEBUG oslo_concurrency.lockutils [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1655.509072] env[63379]: DEBUG oslo_concurrency.lockutils [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1655.509072] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb8a6b6d-76d9-4c4f-b568-a913442f2e1e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.511943] env[63379]: DEBUG oslo_vmware.api [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Waiting for the task: (returnval){ [ 1655.511943] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52488f31-7b09-8c46-163d-c0424f1dac7c" [ 1655.511943] env[63379]: _type = "Task" [ 1655.511943] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1655.521101] env[63379]: DEBUG oslo_vmware.api [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52488f31-7b09-8c46-163d-c0424f1dac7c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1655.572535] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74828f9b-a5a6-4fb2-8963-9adba2e689f2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.576227] env[63379]: DEBUG nova.network.neutron [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Successfully created port: 6cdabd2b-f665-46a9-a86e-2527cfe452bf {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1655.583325] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbba7313-4dcd-4c0e-843f-a6016e358d84 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.626273] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14f6eaef-b82b-467d-a461-7b9e23e33810 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.636019] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779685, 'name': CreateVM_Task, 'duration_secs': 0.426931} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1655.639105] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1655.639105] env[63379]: DEBUG oslo_concurrency.lockutils [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1655.640017] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a1e44ac-b7e1-4688-8534-2d0d4d06b274 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.654184] env[63379]: DEBUG nova.compute.provider_tree [None req-36a06ee9-ab71-4c32-b967-60f4dbe43cce tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1655.689182] env[63379]: DEBUG nova.compute.manager [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1655.722675] env[63379]: DEBUG oslo_vmware.api [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': task-1779686, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075813} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1655.722794] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1655.723532] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56a4258d-c750-4d2c-89c0-0b1021de8f12 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.746776] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f/36681a38-7cfd-44cf-8b8f-1f4dfb613f4f.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1655.747480] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6b2f14f5-fe9f-48a7-8c6e-da5026ab334c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.767984] env[63379]: DEBUG oslo_vmware.api [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Waiting for the task: (returnval){ [ 1655.767984] env[63379]: value = "task-1779687" [ 1655.767984] env[63379]: _type = "Task" [ 1655.767984] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1655.776639] env[63379]: DEBUG oslo_vmware.api [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': task-1779687, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1655.899562] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e82d24e3-5aa1-4854-a652-599af3f01d0c tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1656.025832] env[63379]: DEBUG oslo_vmware.api [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52488f31-7b09-8c46-163d-c0424f1dac7c, 'name': SearchDatastore_Task, 'duration_secs': 0.02337} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1656.026322] env[63379]: DEBUG oslo_concurrency.lockutils [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1656.026596] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1656.026856] env[63379]: DEBUG oslo_concurrency.lockutils [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1656.028414] env[63379]: DEBUG oslo_concurrency.lockutils [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1656.028414] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1656.028414] env[63379]: DEBUG oslo_concurrency.lockutils [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1656.028414] env[63379]: DEBUG oslo_concurrency.lockutils [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1656.028414] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-532f2356-992d-4e42-a02a-506a519fa9c9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.030445] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76211440-05fd-4cf8-8c40-2ae532ab2f26 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1656.036555] env[63379]: DEBUG oslo_vmware.api [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1656.036555] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]520bbbc8-d7b1-5504-0727-a3f8a220064d" [ 1656.036555] env[63379]: _type = "Task" [ 1656.036555] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1656.041175] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1656.041373] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1656.042470] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dde8cfde-752b-48c6-b597-a1dbaeddc18a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.047728] env[63379]: DEBUG oslo_vmware.api [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]520bbbc8-d7b1-5504-0727-a3f8a220064d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1656.050756] env[63379]: DEBUG oslo_vmware.api [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Waiting for the task: (returnval){ [ 1656.050756] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a73d57-0bfa-27fd-7193-25245e539070" [ 1656.050756] env[63379]: _type = "Task" [ 1656.050756] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1656.059344] env[63379]: DEBUG oslo_vmware.api [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a73d57-0bfa-27fd-7193-25245e539070, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1656.071242] env[63379]: DEBUG nova.compute.manager [req-08699605-38fd-40ca-928d-7955ed42004e req-695d62e2-a5ea-46d1-93b8-3fb7033a53ba service nova] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Received event network-vif-deleted-333b52cb-3eba-421d-a26f-dc741d462410 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1656.157692] env[63379]: DEBUG nova.scheduler.client.report [None req-36a06ee9-ab71-4c32-b967-60f4dbe43cce tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1656.277660] env[63379]: DEBUG oslo_vmware.api [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': task-1779687, 'name': ReconfigVM_Task, 'duration_secs': 0.471022} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1656.277952] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Reconfigured VM instance instance-00000043 to attach disk [datastore1] 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f/36681a38-7cfd-44cf-8b8f-1f4dfb613f4f.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1656.278670] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3bc0560a-14fc-4c7e-b097-fda2c7d131b9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.285787] env[63379]: DEBUG oslo_vmware.api [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Waiting for the task: (returnval){ [ 1656.285787] env[63379]: value = "task-1779688" [ 1656.285787] env[63379]: _type = "Task" [ 1656.285787] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1656.294204] env[63379]: DEBUG oslo_vmware.api [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': task-1779688, 'name': Rename_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1656.511791] env[63379]: INFO nova.compute.manager [None req-8220404a-da6a-4d94-9fef-e59c053d8328 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Updating instance to original state: 'active' [ 1656.547287] env[63379]: DEBUG oslo_vmware.api [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]520bbbc8-d7b1-5504-0727-a3f8a220064d, 'name': SearchDatastore_Task, 'duration_secs': 0.021428} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1656.547523] env[63379]: DEBUG oslo_concurrency.lockutils [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1656.547766] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1656.547989] env[63379]: DEBUG oslo_concurrency.lockutils [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1656.560353] env[63379]: DEBUG oslo_vmware.api [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a73d57-0bfa-27fd-7193-25245e539070, 'name': SearchDatastore_Task, 'duration_secs': 0.038099} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1656.561127] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50fdf4e2-118a-4e5b-a10f-d79ef9fa739a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.566381] env[63379]: DEBUG oslo_vmware.api [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Waiting for the task: (returnval){ [ 1656.566381] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]525d33d0-eaa3-9ef5-49a7-1bc8647b2f2e" [ 1656.566381] env[63379]: _type = "Task" [ 1656.566381] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1656.574204] env[63379]: DEBUG oslo_vmware.api [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]525d33d0-eaa3-9ef5-49a7-1bc8647b2f2e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1656.662905] env[63379]: DEBUG oslo_concurrency.lockutils [None req-36a06ee9-ab71-4c32-b967-60f4dbe43cce tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.987s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1656.665402] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f53dd295-a5d0-475b-8108-b8e87d867814 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.492s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1656.665651] env[63379]: DEBUG nova.objects.instance [None req-f53dd295-a5d0-475b-8108-b8e87d867814 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Lazy-loading 'resources' on Instance uuid 08465a2c-1ab6-4c53-9b12-3cd51c717b03 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1656.685009] env[63379]: INFO nova.scheduler.client.report [None req-36a06ee9-ab71-4c32-b967-60f4dbe43cce tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Deleted allocations for instance 8b07ef47-3615-41a5-acfd-87c1ad43b4b9 [ 1656.696911] env[63379]: DEBUG nova.compute.manager [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1656.724425] env[63379]: DEBUG nova.virt.hardware [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1656.724641] env[63379]: DEBUG nova.virt.hardware [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1656.724802] env[63379]: DEBUG nova.virt.hardware [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1656.724990] env[63379]: DEBUG nova.virt.hardware [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1656.725159] env[63379]: DEBUG nova.virt.hardware [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1656.725310] env[63379]: DEBUG nova.virt.hardware [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1656.725539] env[63379]: DEBUG nova.virt.hardware [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1656.725778] env[63379]: DEBUG nova.virt.hardware [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1656.726011] env[63379]: DEBUG 
nova.virt.hardware [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1656.726205] env[63379]: DEBUG nova.virt.hardware [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1656.726387] env[63379]: DEBUG nova.virt.hardware [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1656.727513] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-883a482e-80b5-4646-a84f-4fda0774e030 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.737050] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec125621-9d8d-41cd-95a2-ef4169ced232 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.795604] env[63379]: DEBUG oslo_vmware.api [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': task-1779688, 'name': Rename_Task, 'duration_secs': 0.141612} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1656.795920] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1656.796163] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1025d0a7-8138-4960-85b9-3a0d17e822ea {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.802818] env[63379]: DEBUG oslo_vmware.api [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Waiting for the task: (returnval){ [ 1656.802818] env[63379]: value = "task-1779689" [ 1656.802818] env[63379]: _type = "Task" [ 1656.802818] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1656.810054] env[63379]: DEBUG oslo_vmware.api [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': task-1779689, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1657.038797] env[63379]: DEBUG nova.compute.manager [req-c37cea9c-ab7c-4e65-9e45-bfd6e5f9cfeb req-ee7d4fb3-9d31-4f08-849c-fca5f5e99c1c service nova] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Received event network-vif-plugged-6cdabd2b-f665-46a9-a86e-2527cfe452bf {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1657.038797] env[63379]: DEBUG oslo_concurrency.lockutils [req-c37cea9c-ab7c-4e65-9e45-bfd6e5f9cfeb req-ee7d4fb3-9d31-4f08-849c-fca5f5e99c1c service nova] Acquiring lock "510db409-0b4c-494a-8084-39ef3cd6c918-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1657.040092] env[63379]: DEBUG oslo_concurrency.lockutils [req-c37cea9c-ab7c-4e65-9e45-bfd6e5f9cfeb req-ee7d4fb3-9d31-4f08-849c-fca5f5e99c1c service nova] Lock "510db409-0b4c-494a-8084-39ef3cd6c918-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1657.040092] env[63379]: DEBUG oslo_concurrency.lockutils [req-c37cea9c-ab7c-4e65-9e45-bfd6e5f9cfeb req-ee7d4fb3-9d31-4f08-849c-fca5f5e99c1c service nova] Lock "510db409-0b4c-494a-8084-39ef3cd6c918-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1657.040092] env[63379]: DEBUG nova.compute.manager [req-c37cea9c-ab7c-4e65-9e45-bfd6e5f9cfeb req-ee7d4fb3-9d31-4f08-849c-fca5f5e99c1c service nova] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] No waiting events found dispatching network-vif-plugged-6cdabd2b-f665-46a9-a86e-2527cfe452bf {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1657.040092] env[63379]: WARNING nova.compute.manager [req-c37cea9c-ab7c-4e65-9e45-bfd6e5f9cfeb req-ee7d4fb3-9d31-4f08-849c-fca5f5e99c1c service nova] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Received unexpected event network-vif-plugged-6cdabd2b-f665-46a9-a86e-2527cfe452bf for instance with vm_state building and task_state spawning. [ 1657.077571] env[63379]: DEBUG oslo_vmware.api [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]525d33d0-eaa3-9ef5-49a7-1bc8647b2f2e, 'name': SearchDatastore_Task, 'duration_secs': 0.01318} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1657.077856] env[63379]: DEBUG oslo_concurrency.lockutils [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1657.078141] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] f087b3ac-13e2-4e55-a3ce-5e6bd3379239/f087b3ac-13e2-4e55-a3ce-5e6bd3379239.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1657.078424] env[63379]: DEBUG oslo_concurrency.lockutils [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1657.078609] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1657.078819] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d64be882-e9c3-4072-b8ea-69605ff2df48 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.081375] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aa966579-9446-48c5-ac83-79ce024c034f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.090475] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1657.090650] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1657.092313] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-857c34ba-04b3-4d49-a516-02e66caa4df0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.094818] env[63379]: DEBUG oslo_vmware.api [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Waiting for the task: (returnval){ [ 1657.094818] env[63379]: value = "task-1779690" [ 1657.094818] env[63379]: _type = "Task" [ 1657.094818] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1657.099747] env[63379]: DEBUG oslo_vmware.api [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1657.099747] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52965e78-d2bd-8cbe-5be3-4e6d53e1a314" [ 1657.099747] env[63379]: _type = "Task" [ 1657.099747] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1657.106147] env[63379]: DEBUG oslo_vmware.api [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Task: {'id': task-1779690, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1657.111161] env[63379]: DEBUG oslo_vmware.api [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52965e78-d2bd-8cbe-5be3-4e6d53e1a314, 'name': SearchDatastore_Task, 'duration_secs': 0.008216} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1657.111921] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-718f857d-d844-4030-8d62-01f1e497fd0a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.116731] env[63379]: DEBUG oslo_vmware.api [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1657.116731] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52669a33-96b5-3fbf-b84a-6754650fb9ea" [ 1657.116731] env[63379]: _type = "Task" [ 1657.116731] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1657.124518] env[63379]: DEBUG oslo_vmware.api [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52669a33-96b5-3fbf-b84a-6754650fb9ea, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1657.199890] env[63379]: DEBUG oslo_concurrency.lockutils [None req-36a06ee9-ab71-4c32-b967-60f4dbe43cce tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "8b07ef47-3615-41a5-acfd-87c1ad43b4b9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.656s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1657.202958] env[63379]: DEBUG nova.network.neutron [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Successfully updated port: 6cdabd2b-f665-46a9-a86e-2527cfe452bf {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1657.314440] env[63379]: DEBUG oslo_vmware.api [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': task-1779689, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1657.566970] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6516849-87c6-42fe-82a6-0132d84b4f0b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.577242] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-016e8acf-3c46-468a-920e-e3245a64fcd5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.615936] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5da30469-9bc4-48a4-aa35-68322add724e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.625528] env[63379]: DEBUG oslo_vmware.api [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Task: {'id': task-1779690, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1657.629266] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b6b4f19-779b-4daf-a6d7-0cf3a5ddd9e9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.636880] env[63379]: DEBUG oslo_vmware.api [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52669a33-96b5-3fbf-b84a-6754650fb9ea, 'name': SearchDatastore_Task, 'duration_secs': 0.009435} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1657.637573] env[63379]: DEBUG oslo_concurrency.lockutils [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1657.637849] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 758ade2c-7f75-4907-95d5-681d5792ae31/758ade2c-7f75-4907-95d5-681d5792ae31.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1657.638134] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ebe42048-743a-4b33-94ae-8cab681b8c23 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.648986] env[63379]: DEBUG nova.compute.provider_tree [None req-f53dd295-a5d0-475b-8108-b8e87d867814 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1657.656104] env[63379]: DEBUG oslo_vmware.api [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1657.656104] env[63379]: value = "task-1779691" [ 1657.656104] env[63379]: _type = "Task" [ 1657.656104] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1657.667072] env[63379]: DEBUG oslo_vmware.api [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779691, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1657.705742] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "refresh_cache-510db409-0b4c-494a-8084-39ef3cd6c918" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1657.705957] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquired lock "refresh_cache-510db409-0b4c-494a-8084-39ef3cd6c918" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1657.706168] env[63379]: DEBUG nova.network.neutron [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1657.814465] env[63379]: DEBUG oslo_vmware.api [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': task-1779689, 'name': PowerOnVM_Task, 'duration_secs': 0.701929} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1657.814789] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1657.814998] env[63379]: INFO nova.compute.manager [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Took 8.10 seconds to spawn the instance on the hypervisor. 
[ 1657.815202] env[63379]: DEBUG nova.compute.manager [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1657.816029] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0d4d1f7-8fad-4613-8763-f24108880424 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.037095] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b62c5475-063e-43b5-bced-13bc1ea14379 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Acquiring lock "f082cdd7-228e-4100-b301-5af6daea9b36" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1658.037400] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b62c5475-063e-43b5-bced-13bc1ea14379 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Lock "f082cdd7-228e-4100-b301-5af6daea9b36" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1658.037622] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b62c5475-063e-43b5-bced-13bc1ea14379 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Acquiring lock "f082cdd7-228e-4100-b301-5af6daea9b36-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1658.038030] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b62c5475-063e-43b5-bced-13bc1ea14379 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Lock "f082cdd7-228e-4100-b301-5af6daea9b36-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1658.038030] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b62c5475-063e-43b5-bced-13bc1ea14379 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Lock "f082cdd7-228e-4100-b301-5af6daea9b36-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1658.040049] env[63379]: INFO nova.compute.manager [None req-b62c5475-063e-43b5-bced-13bc1ea14379 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Terminating instance [ 1658.041917] env[63379]: DEBUG nova.compute.manager [None req-b62c5475-063e-43b5-bced-13bc1ea14379 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1658.042142] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b62c5475-063e-43b5-bced-13bc1ea14379 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1658.043012] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64c28759-1972-464f-a798-4976f8dbf59b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.051697] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-b62c5475-063e-43b5-bced-13bc1ea14379 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1658.051965] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bdc2ee70-aeb8-4e71-afd7-da764c617e9f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.059483] env[63379]: DEBUG oslo_vmware.api [None req-b62c5475-063e-43b5-bced-13bc1ea14379 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Waiting for the task: (returnval){ [ 1658.059483] env[63379]: value = "task-1779692" [ 1658.059483] env[63379]: _type = "Task" [ 1658.059483] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1658.067979] env[63379]: DEBUG oslo_vmware.api [None req-b62c5475-063e-43b5-bced-13bc1ea14379 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779692, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1658.121626] env[63379]: DEBUG oslo_vmware.api [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Task: {'id': task-1779690, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.613974} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1658.121921] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] f087b3ac-13e2-4e55-a3ce-5e6bd3379239/f087b3ac-13e2-4e55-a3ce-5e6bd3379239.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1658.122162] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1658.122440] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-48ef5206-8fbe-464e-81af-23624ae67976 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.129413] env[63379]: DEBUG oslo_vmware.api [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Waiting for the task: (returnval){ [ 1658.129413] env[63379]: value = "task-1779693" [ 1658.129413] env[63379]: _type = "Task" [ 1658.129413] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1658.138707] env[63379]: DEBUG oslo_vmware.api [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Task: {'id': task-1779693, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1658.153222] env[63379]: DEBUG nova.scheduler.client.report [None req-f53dd295-a5d0-475b-8108-b8e87d867814 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1658.167582] env[63379]: DEBUG oslo_vmware.api [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779691, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1658.268188] env[63379]: DEBUG nova.network.neutron [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1658.333834] env[63379]: INFO nova.compute.manager [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Took 42.32 seconds to build instance. [ 1658.514622] env[63379]: DEBUG nova.network.neutron [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Updating instance_info_cache with network_info: [{"id": "6cdabd2b-f665-46a9-a86e-2527cfe452bf", "address": "fa:16:3e:bc:a5:55", "network": {"id": "867cf8d8-4bba-4306-ad6d-632c9dc6863d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-777715300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a3363a90de2d4d5988ddd03974c10d0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "459b8c74-0aa6-42b6-996a-42b1c5d7e5c6", "external-id": "nsx-vlan-transportzone-467", "segmentation_id": 467, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6cdabd2b-f6", "ovs_interfaceid": "6cdabd2b-f665-46a9-a86e-2527cfe452bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1658.526032] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5f809585-762e-4449-97c5-07a91f390322 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquiring lock "f10fe64d-a09e-488a-b609-3e38922cf2e0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1658.526206] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5f809585-762e-4449-97c5-07a91f390322 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "f10fe64d-a09e-488a-b609-3e38922cf2e0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1658.526426] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5f809585-762e-4449-97c5-07a91f390322 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquiring lock "f10fe64d-a09e-488a-b609-3e38922cf2e0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" 
{{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1658.526612] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5f809585-762e-4449-97c5-07a91f390322 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "f10fe64d-a09e-488a-b609-3e38922cf2e0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1658.526789] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5f809585-762e-4449-97c5-07a91f390322 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "f10fe64d-a09e-488a-b609-3e38922cf2e0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1658.529689] env[63379]: INFO nova.compute.manager [None req-5f809585-762e-4449-97c5-07a91f390322 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Terminating instance [ 1658.531370] env[63379]: DEBUG nova.compute.manager [None req-5f809585-762e-4449-97c5-07a91f390322 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1658.531569] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-5f809585-762e-4449-97c5-07a91f390322 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1658.532415] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-816f5de6-af18-4735-ae0d-27415c60ce0d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.542144] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f809585-762e-4449-97c5-07a91f390322 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1658.542660] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c2d49c13-de60-486f-990b-61e8063f2d01 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.548838] env[63379]: DEBUG oslo_vmware.api [None req-5f809585-762e-4449-97c5-07a91f390322 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1658.548838] env[63379]: value = "task-1779694" [ 1658.548838] env[63379]: _type = "Task" [ 1658.548838] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1658.557481] env[63379]: DEBUG oslo_vmware.api [None req-5f809585-762e-4449-97c5-07a91f390322 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779694, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1658.575420] env[63379]: DEBUG oslo_vmware.api [None req-b62c5475-063e-43b5-bced-13bc1ea14379 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779692, 'name': PowerOffVM_Task, 'duration_secs': 0.412763} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1658.575420] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-b62c5475-063e-43b5-bced-13bc1ea14379 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1658.575420] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b62c5475-063e-43b5-bced-13bc1ea14379 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1658.575420] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e5693cf3-a35e-4121-aa20-1e95f1b99091 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.640726] env[63379]: DEBUG oslo_vmware.api [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Task: {'id': task-1779693, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071783} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1658.641100] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1658.641916] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ba207a2-ffcc-428a-a1e5-0c3b8a55917c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.666938] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] f087b3ac-13e2-4e55-a3ce-5e6bd3379239/f087b3ac-13e2-4e55-a3ce-5e6bd3379239.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1658.671181] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f53dd295-a5d0-475b-8108-b8e87d867814 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.006s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1658.673029] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e430ae72-a2d4-40d5-92d9-45ab066d741b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.693572] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.918s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1658.695585] env[63379]: INFO nova.compute.claims [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1658.705899] env[63379]: DEBUG oslo_vmware.api [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779691, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.68603} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1658.706988] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 758ade2c-7f75-4907-95d5-681d5792ae31/758ade2c-7f75-4907-95d5-681d5792ae31.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1658.707241] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1658.707564] env[63379]: DEBUG oslo_vmware.api [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Waiting for the task: (returnval){ [ 1658.707564] env[63379]: value = "task-1779696" [ 1658.707564] env[63379]: _type = "Task" [ 1658.707564] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1658.707755] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-11d1d448-94ae-4a39-89a1-cce934627f2a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.714860] env[63379]: INFO nova.scheduler.client.report [None req-f53dd295-a5d0-475b-8108-b8e87d867814 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Deleted allocations for instance 08465a2c-1ab6-4c53-9b12-3cd51c717b03 [ 1658.723711] env[63379]: DEBUG oslo_vmware.api [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Task: {'id': task-1779696, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1658.724322] env[63379]: DEBUG oslo_vmware.api [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1658.724322] env[63379]: value = "task-1779697" [ 1658.724322] env[63379]: _type = "Task" [ 1658.724322] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1658.734021] env[63379]: DEBUG oslo_vmware.api [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779697, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1658.744932] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b62c5475-063e-43b5-bced-13bc1ea14379 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1658.744932] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b62c5475-063e-43b5-bced-13bc1ea14379 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1658.744932] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-b62c5475-063e-43b5-bced-13bc1ea14379 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Deleting the datastore file [datastore1] f082cdd7-228e-4100-b301-5af6daea9b36 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1658.744932] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-367ddeca-7d53-43ea-b6f0-e6ebc22725d5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.752221] env[63379]: DEBUG oslo_vmware.api [None req-b62c5475-063e-43b5-bced-13bc1ea14379 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Waiting for the task: (returnval){ [ 1658.752221] env[63379]: value = "task-1779698" [ 1658.752221] env[63379]: _type = "Task" [ 1658.752221] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1658.762577] env[63379]: DEBUG oslo_vmware.api [None req-b62c5475-063e-43b5-bced-13bc1ea14379 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779698, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1658.836397] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a253c6da-719e-4dfb-8f2f-471d156ebff0 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Lock "36681a38-7cfd-44cf-8b8f-1f4dfb613f4f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.830s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1659.020015] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Releasing lock "refresh_cache-510db409-0b4c-494a-8084-39ef3cd6c918" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1659.020015] env[63379]: DEBUG nova.compute.manager [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Instance network_info: |[{"id": "6cdabd2b-f665-46a9-a86e-2527cfe452bf", "address": "fa:16:3e:bc:a5:55", "network": {"id": "867cf8d8-4bba-4306-ad6d-632c9dc6863d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-777715300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a3363a90de2d4d5988ddd03974c10d0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "459b8c74-0aa6-42b6-996a-42b1c5d7e5c6", "external-id": "nsx-vlan-transportzone-467", "segmentation_id": 467, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6cdabd2b-f6", "ovs_interfaceid": "6cdabd2b-f665-46a9-a86e-2527cfe452bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1659.020015] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bc:a5:55', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '459b8c74-0aa6-42b6-996a-42b1c5d7e5c6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6cdabd2b-f665-46a9-a86e-2527cfe452bf', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1659.026500] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Creating folder: Project (a3363a90de2d4d5988ddd03974c10d0a). Parent ref: group-v369214. 
{{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1659.026976] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d9ca6517-c80a-4a49-9bf6-fc9e84109bab {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.038982] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Created folder: Project (a3363a90de2d4d5988ddd03974c10d0a) in parent group-v369214. [ 1659.040338] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Creating folder: Instances. Parent ref: group-v369410. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1659.040338] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5bbd1d39-5379-467d-a456-8f410d1888f1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.055194] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Created folder: Instances in parent group-v369410. [ 1659.055194] env[63379]: DEBUG oslo.service.loopingcall [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1659.055194] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1659.055835] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3cf2fb37-8a47-4c47-8aa2-cffc65d8175c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.073394] env[63379]: DEBUG oslo_vmware.api [None req-5f809585-762e-4449-97c5-07a91f390322 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779694, 'name': PowerOffVM_Task, 'duration_secs': 0.207776} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1659.074569] env[63379]: DEBUG nova.compute.manager [req-1f5e30ab-c7dd-4a7c-b92a-fffbd2950881 req-ed05e449-3b34-43ca-99e5-19501c9ec2f1 service nova] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Received event network-changed-6cdabd2b-f665-46a9-a86e-2527cfe452bf {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1659.074758] env[63379]: DEBUG nova.compute.manager [req-1f5e30ab-c7dd-4a7c-b92a-fffbd2950881 req-ed05e449-3b34-43ca-99e5-19501c9ec2f1 service nova] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Refreshing instance network info cache due to event network-changed-6cdabd2b-f665-46a9-a86e-2527cfe452bf. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1659.075016] env[63379]: DEBUG oslo_concurrency.lockutils [req-1f5e30ab-c7dd-4a7c-b92a-fffbd2950881 req-ed05e449-3b34-43ca-99e5-19501c9ec2f1 service nova] Acquiring lock "refresh_cache-510db409-0b4c-494a-8084-39ef3cd6c918" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1659.075265] env[63379]: DEBUG oslo_concurrency.lockutils [req-1f5e30ab-c7dd-4a7c-b92a-fffbd2950881 req-ed05e449-3b34-43ca-99e5-19501c9ec2f1 service nova] Acquired lock "refresh_cache-510db409-0b4c-494a-8084-39ef3cd6c918" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1659.075532] env[63379]: DEBUG nova.network.neutron [req-1f5e30ab-c7dd-4a7c-b92a-fffbd2950881 req-ed05e449-3b34-43ca-99e5-19501c9ec2f1 service nova] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Refreshing network info cache for port 6cdabd2b-f665-46a9-a86e-2527cfe452bf {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1659.077221] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f809585-762e-4449-97c5-07a91f390322 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1659.077325] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-5f809585-762e-4449-97c5-07a91f390322 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1659.077963] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6935e7f0-1689-4e94-803c-d904e6cebb85 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.081872] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1659.081872] env[63379]: value = "task-1779701" [ 1659.081872] env[63379]: _type = "Task" [ 1659.081872] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1659.090832] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779701, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1659.158352] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-5f809585-762e-4449-97c5-07a91f390322 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1659.158717] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-5f809585-762e-4449-97c5-07a91f390322 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1659.158912] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f809585-762e-4449-97c5-07a91f390322 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Deleting the datastore file [datastore1] f10fe64d-a09e-488a-b609-3e38922cf2e0 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1659.159408] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1723724f-77ce-4603-90fa-c506cfb3c73c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.166037] env[63379]: DEBUG oslo_vmware.api [None req-5f809585-762e-4449-97c5-07a91f390322 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1659.166037] env[63379]: value = "task-1779703" [ 1659.166037] env[63379]: _type = "Task" [ 1659.166037] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1659.174689] env[63379]: DEBUG oslo_vmware.api [None req-5f809585-762e-4449-97c5-07a91f390322 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779703, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1659.186852] env[63379]: DEBUG oslo_concurrency.lockutils [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquiring lock "8b33e64a-ea19-4974-8c2d-350615b1e061" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1659.187145] env[63379]: DEBUG oslo_concurrency.lockutils [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "8b33e64a-ea19-4974-8c2d-350615b1e061" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1659.220150] env[63379]: DEBUG oslo_vmware.api [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Task: {'id': task-1779696, 'name': ReconfigVM_Task, 'duration_secs': 0.36016} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1659.220703] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Reconfigured VM instance instance-00000044 to attach disk [datastore1] f087b3ac-13e2-4e55-a3ce-5e6bd3379239/f087b3ac-13e2-4e55-a3ce-5e6bd3379239.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1659.221329] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f17ddc20-518a-4e14-9bdb-9da2c4ce3f86 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.231830] env[63379]: DEBUG oslo_vmware.api [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Waiting for the task: (returnval){ [ 1659.231830] env[63379]: value = "task-1779704" [ 1659.231830] env[63379]: _type = "Task" [ 1659.231830] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1659.232323] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f53dd295-a5d0-475b-8108-b8e87d867814 tempest-ServersTestFqdnHostnames-219247505 tempest-ServersTestFqdnHostnames-219247505-project-member] Lock "08465a2c-1ab6-4c53-9b12-3cd51c717b03" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.895s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1659.239937] env[63379]: DEBUG oslo_vmware.api [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779697, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.095586} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1659.240561] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1659.241359] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc2f846d-87d3-4805-aefe-b842721000d4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.247059] env[63379]: DEBUG oslo_vmware.api [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Task: {'id': task-1779704, 'name': Rename_Task} progress is 10%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1659.269091] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Reconfiguring VM instance instance-00000027 to attach disk [datastore1] 758ade2c-7f75-4907-95d5-681d5792ae31/758ade2c-7f75-4907-95d5-681d5792ae31.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1659.272796] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d36822d6-2f40-486b-96fd-23cddeac10c4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.292764] env[63379]: DEBUG oslo_vmware.api [None req-b62c5475-063e-43b5-bced-13bc1ea14379 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779698, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.19977} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1659.294034] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-b62c5475-063e-43b5-bced-13bc1ea14379 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1659.294249] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b62c5475-063e-43b5-bced-13bc1ea14379 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1659.294473] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b62c5475-063e-43b5-bced-13bc1ea14379 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1659.294677] env[63379]: INFO nova.compute.manager [None req-b62c5475-063e-43b5-bced-13bc1ea14379 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Took 1.25 seconds to destroy the instance on the hypervisor. [ 1659.294942] env[63379]: DEBUG oslo.service.loopingcall [None req-b62c5475-063e-43b5-bced-13bc1ea14379 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1659.295240] env[63379]: DEBUG oslo_vmware.api [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1659.295240] env[63379]: value = "task-1779705" [ 1659.295240] env[63379]: _type = "Task" [ 1659.295240] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1659.295420] env[63379]: DEBUG nova.compute.manager [-] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1659.295515] env[63379]: DEBUG nova.network.neutron [-] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1659.305483] env[63379]: DEBUG oslo_vmware.api [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779705, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1659.594628] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779701, 'name': CreateVM_Task, 'duration_secs': 0.446993} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1659.594809] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1659.595675] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1659.595941] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1659.596393] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1659.596677] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4960047e-93e0-4df1-8a3a-f44bf71ba6ae {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.602672] env[63379]: DEBUG oslo_vmware.api [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1659.602672] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b10a58-9c55-4e9a-2a2c-19c4747eab7d" [ 1659.602672] env[63379]: _type = "Task" [ 1659.602672] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1659.615202] env[63379]: DEBUG oslo_vmware.api [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b10a58-9c55-4e9a-2a2c-19c4747eab7d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1659.680393] env[63379]: DEBUG oslo_vmware.api [None req-5f809585-762e-4449-97c5-07a91f390322 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779703, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.153291} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1659.680393] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f809585-762e-4449-97c5-07a91f390322 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1659.680393] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-5f809585-762e-4449-97c5-07a91f390322 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1659.680563] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-5f809585-762e-4449-97c5-07a91f390322 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1659.680709] env[63379]: INFO nova.compute.manager [None req-5f809585-762e-4449-97c5-07a91f390322 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1659.682438] env[63379]: DEBUG oslo.service.loopingcall [None req-5f809585-762e-4449-97c5-07a91f390322 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1659.682438] env[63379]: DEBUG nova.compute.manager [-] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1659.682438] env[63379]: DEBUG nova.network.neutron [-] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1659.689759] env[63379]: DEBUG nova.compute.manager [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Starting instance... 
{{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1659.751591] env[63379]: DEBUG oslo_vmware.api [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Task: {'id': task-1779704, 'name': Rename_Task, 'duration_secs': 0.1775} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1659.751894] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1659.752169] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dcebf93d-7ccd-4c63-bef8-0ccb3698c01b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.762183] env[63379]: DEBUG oslo_vmware.api [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Waiting for the task: (returnval){ [ 1659.762183] env[63379]: value = "task-1779706" [ 1659.762183] env[63379]: _type = "Task" [ 1659.762183] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1659.774119] env[63379]: DEBUG oslo_vmware.api [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Task: {'id': task-1779706, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1659.810277] env[63379]: DEBUG oslo_vmware.api [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779705, 'name': ReconfigVM_Task, 'duration_secs': 0.495521} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1659.811108] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Reconfigured VM instance instance-00000027 to attach disk [datastore1] 758ade2c-7f75-4907-95d5-681d5792ae31/758ade2c-7f75-4907-95d5-681d5792ae31.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1659.811459] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9939b1d5-2ead-4d49-ae35-a7dd3dcddd68 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.818315] env[63379]: DEBUG oslo_vmware.api [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1659.818315] env[63379]: value = "task-1779707" [ 1659.818315] env[63379]: _type = "Task" [ 1659.818315] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1659.834020] env[63379]: DEBUG oslo_vmware.api [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779707, 'name': Rename_Task} progress is 10%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1659.949437] env[63379]: DEBUG nova.network.neutron [req-1f5e30ab-c7dd-4a7c-b92a-fffbd2950881 req-ed05e449-3b34-43ca-99e5-19501c9ec2f1 service nova] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Updated VIF entry in instance network info cache for port 6cdabd2b-f665-46a9-a86e-2527cfe452bf. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1659.949844] env[63379]: DEBUG nova.network.neutron [req-1f5e30ab-c7dd-4a7c-b92a-fffbd2950881 req-ed05e449-3b34-43ca-99e5-19501c9ec2f1 service nova] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Updating instance_info_cache with network_info: [{"id": "6cdabd2b-f665-46a9-a86e-2527cfe452bf", "address": "fa:16:3e:bc:a5:55", "network": {"id": "867cf8d8-4bba-4306-ad6d-632c9dc6863d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-777715300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a3363a90de2d4d5988ddd03974c10d0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "459b8c74-0aa6-42b6-996a-42b1c5d7e5c6", "external-id": "nsx-vlan-transportzone-467", "segmentation_id": 467, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6cdabd2b-f6", "ovs_interfaceid": "6cdabd2b-f665-46a9-a86e-2527cfe452bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1660.091627] env[63379]: DEBUG nova.network.neutron [-] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1660.115381] env[63379]: DEBUG oslo_vmware.api [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b10a58-9c55-4e9a-2a2c-19c4747eab7d, 'name': SearchDatastore_Task, 'duration_secs': 0.012724} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1660.115725] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1660.116134] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1660.116449] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1660.116662] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1660.116892] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1660.117542] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-de76eca6-1401-44b5-a117-d6a6395bde25 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.128897] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1660.129098] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1660.130036] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d71cb375-3fc3-4b7a-af40-3809ccb9927a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.135502] env[63379]: DEBUG oslo_vmware.api [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1660.135502] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]525da893-07fb-4d4b-77ea-aa0990aeaac1" [ 1660.135502] env[63379]: _type = "Task" [ 1660.135502] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1660.148106] env[63379]: DEBUG oslo_vmware.api [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]525da893-07fb-4d4b-77ea-aa0990aeaac1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.158166] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d861c15b-ee5b-4737-bcb8-6b9b6698d1ff {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.166572] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00c90bf5-6022-479e-a27b-3d1a266921ac {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.205225] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32007675-4089-47fe-8596-ec3ef0aa2706 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.216377] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4b444c0-9fc4-4ef3-a836-4f28d7c928bc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.221924] env[63379]: DEBUG oslo_concurrency.lockutils [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1660.233470] env[63379]: DEBUG nova.compute.provider_tree [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1660.272646] env[63379]: DEBUG oslo_vmware.api [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Task: {'id': task-1779706, 'name': PowerOnVM_Task} progress is 90%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.329175] env[63379]: DEBUG oslo_vmware.api [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779707, 'name': Rename_Task, 'duration_secs': 0.170907} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1660.329542] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1660.329863] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b508ece9-018f-4f71-8723-eae325c5113d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.337074] env[63379]: DEBUG oslo_vmware.api [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1660.337074] env[63379]: value = "task-1779708" [ 1660.337074] env[63379]: _type = "Task" [ 1660.337074] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1660.344596] env[63379]: DEBUG oslo_vmware.api [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779708, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.453916] env[63379]: DEBUG oslo_concurrency.lockutils [req-1f5e30ab-c7dd-4a7c-b92a-fffbd2950881 req-ed05e449-3b34-43ca-99e5-19501c9ec2f1 service nova] Releasing lock "refresh_cache-510db409-0b4c-494a-8084-39ef3cd6c918" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1660.599850] env[63379]: INFO nova.compute.manager [-] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Took 1.30 seconds to deallocate network for instance. [ 1660.648160] env[63379]: DEBUG oslo_vmware.api [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]525da893-07fb-4d4b-77ea-aa0990aeaac1, 'name': SearchDatastore_Task, 'duration_secs': 0.013962} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1660.648979] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23608a72-eeda-4e97-98b2-f5b1186beb3b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.654320] env[63379]: DEBUG oslo_vmware.api [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1660.654320] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]522b994b-0a2c-ef07-e1a3-ec0b69cd53cc" [ 1660.654320] env[63379]: _type = "Task" [ 1660.654320] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1660.662539] env[63379]: DEBUG oslo_vmware.api [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]522b994b-0a2c-ef07-e1a3-ec0b69cd53cc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.709122] env[63379]: DEBUG nova.network.neutron [-] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1660.736922] env[63379]: DEBUG nova.scheduler.client.report [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1660.783518] env[63379]: DEBUG oslo_vmware.api [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Task: {'id': task-1779706, 'name': PowerOnVM_Task, 'duration_secs': 0.580901} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1660.784066] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1660.784300] env[63379]: INFO nova.compute.manager [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Took 8.60 seconds to spawn the instance on the hypervisor. 
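[editorial aside, not part of the captured log] The task-1779706 sequence above (Invoking VirtualMachine.PowerOnVM_Task, "Waiting for the task", the "_poll_task ... progress is N%" lines, then "Powered on the VM") is the usual oslo.vmware invoke-then-poll pattern. A minimal sketch of that pattern follows, assuming an already established oslo_vmware.api.VMwareAPISession and a VirtualMachine managed-object reference; the helper name, hostname, and credentials are placeholders, not values or code from this deployment.

    # Illustrative sketch only; power_on(), the host name, and the credentials
    # below are placeholders and do not come from the log or from Nova itself.
    from oslo_vmware import api as vmware_api


    def power_on(session, vm_ref):
        """Issue PowerOnVM_Task and block until vCenter reports completion."""
        # invoke_api sends the SOAP request (the "Invoking
        # VirtualMachine.PowerOnVM_Task with opID=..." lines in the log).
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task polls the returned task object, emitting the
        # "_poll_task ... progress is N%" debug lines until it reaches
        # success or raises on error.
        return session.wait_for_task(task)


    # Typical session setup (placeholder values):
    # session = vmware_api.VMwareAPISession(
    #     'vcenter.example.org', 'admin', 'secret',
    #     api_retry_count=10, task_poll_interval=0.5)
    # power_on(session, vm_ref)  # vm_ref: a VirtualMachine managed object ref

The task_poll_interval given at session creation sets the cadence of those "_poll_task" progress lines.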
[ 1660.784492] env[63379]: DEBUG nova.compute.manager [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1660.785340] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-136edfdd-2165-4f4d-8a40-94ac19ee3b9b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.847325] env[63379]: DEBUG oslo_vmware.api [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779708, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.108187] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b62c5475-063e-43b5-bced-13bc1ea14379 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1661.117140] env[63379]: DEBUG nova.compute.manager [req-1f391829-5335-4ed2-853b-ee11a4f2f4d2 req-46b9d3cc-a098-4edb-b183-1974e89ff146 service nova] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Received event network-vif-deleted-bbe843e8-9156-454e-8ba4-dae6bc31c8b2 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1661.117953] env[63379]: DEBUG nova.compute.manager [req-1f391829-5335-4ed2-853b-ee11a4f2f4d2 req-46b9d3cc-a098-4edb-b183-1974e89ff146 service nova] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Received event network-vif-deleted-fee236c0-9eaa-44e8-b51c-e97f6f003dad {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1661.167374] env[63379]: DEBUG oslo_vmware.api [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]522b994b-0a2c-ef07-e1a3-ec0b69cd53cc, 'name': SearchDatastore_Task, 'duration_secs': 0.012834} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1661.167832] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1661.168301] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 510db409-0b4c-494a-8084-39ef3cd6c918/510db409-0b4c-494a-8084-39ef3cd6c918.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1661.168813] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-607b10d1-e159-4675-af6b-d2187545c05d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.178138] env[63379]: DEBUG oslo_vmware.api [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1661.178138] env[63379]: value = "task-1779709" [ 1661.178138] env[63379]: _type = "Task" [ 1661.178138] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1661.187978] env[63379]: DEBUG oslo_vmware.api [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1779709, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.211537] env[63379]: INFO nova.compute.manager [-] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Took 1.53 seconds to deallocate network for instance. [ 1661.243681] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.550s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1661.244482] env[63379]: DEBUG nova.compute.manager [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1661.247502] env[63379]: DEBUG oslo_concurrency.lockutils [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.593s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1661.250330] env[63379]: INFO nova.compute.claims [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1661.285755] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8ef4fd12-bb92-4260-b551-82621a44da83 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Acquiring lock "36681a38-7cfd-44cf-8b8f-1f4dfb613f4f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1661.285755] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8ef4fd12-bb92-4260-b551-82621a44da83 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Lock "36681a38-7cfd-44cf-8b8f-1f4dfb613f4f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1661.286252] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8ef4fd12-bb92-4260-b551-82621a44da83 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Acquiring lock "36681a38-7cfd-44cf-8b8f-1f4dfb613f4f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1661.286252] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8ef4fd12-bb92-4260-b551-82621a44da83 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Lock "36681a38-7cfd-44cf-8b8f-1f4dfb613f4f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1661.286482] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8ef4fd12-bb92-4260-b551-82621a44da83 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Lock "36681a38-7cfd-44cf-8b8f-1f4dfb613f4f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1661.288904] env[63379]: INFO nova.compute.manager [None req-8ef4fd12-bb92-4260-b551-82621a44da83 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Terminating instance [ 1661.291031] env[63379]: DEBUG nova.compute.manager [None req-8ef4fd12-bb92-4260-b551-82621a44da83 
tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1661.291276] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8ef4fd12-bb92-4260-b551-82621a44da83 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1661.292193] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40754cf0-022c-4a5c-a1ff-df3e3eee4e3c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.303954] env[63379]: INFO nova.compute.manager [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Took 39.49 seconds to build instance. [ 1661.307290] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ef4fd12-bb92-4260-b551-82621a44da83 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1661.307472] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a5e44eb8-6429-4c59-99a4-b74cc01ab449 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.315735] env[63379]: DEBUG oslo_vmware.api [None req-8ef4fd12-bb92-4260-b551-82621a44da83 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Waiting for the task: (returnval){ [ 1661.315735] env[63379]: value = "task-1779710" [ 1661.315735] env[63379]: _type = "Task" [ 1661.315735] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1661.325135] env[63379]: DEBUG oslo_vmware.api [None req-8ef4fd12-bb92-4260-b551-82621a44da83 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': task-1779710, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.348575] env[63379]: DEBUG oslo_vmware.api [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779708, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.687523] env[63379]: DEBUG oslo_vmware.api [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1779709, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.723569] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5f809585-762e-4449-97c5-07a91f390322 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1661.755072] env[63379]: DEBUG nova.compute.utils [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1661.759486] env[63379]: DEBUG nova.compute.manager [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1661.759486] env[63379]: DEBUG nova.network.neutron [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1661.808849] env[63379]: DEBUG oslo_concurrency.lockutils [None req-723d8890-2905-4fde-8571-c5c6851dd8d9 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Lock "f087b3ac-13e2-4e55-a3ce-5e6bd3379239" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1661.816796] env[63379]: DEBUG nova.policy [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1fd786092d394d1a9b444051664ac7ae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0f28f4532d464e6eb90ab75799990c85', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1661.828164] env[63379]: DEBUG oslo_vmware.api [None req-8ef4fd12-bb92-4260-b551-82621a44da83 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': task-1779710, 'name': PowerOffVM_Task, 'duration_secs': 0.227539} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1661.828527] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ef4fd12-bb92-4260-b551-82621a44da83 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1661.828632] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8ef4fd12-bb92-4260-b551-82621a44da83 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1661.828893] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1b883e8a-ef52-4d81-92db-828c7adb6302 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.847357] env[63379]: DEBUG oslo_vmware.api [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779708, 'name': PowerOnVM_Task, 'duration_secs': 1.066572} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1661.847626] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1661.847828] env[63379]: DEBUG nova.compute.manager [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1661.848629] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26c73280-b3cc-451c-9b87-694cf4648d2b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.045951] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8ef4fd12-bb92-4260-b551-82621a44da83 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1662.046293] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8ef4fd12-bb92-4260-b551-82621a44da83 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1662.046747] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ef4fd12-bb92-4260-b551-82621a44da83 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Deleting the datastore file [datastore1] 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f {{(pid=63379) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1662.046927] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c3418d46-07d5-4b36-9b89-d3f9a496cca0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.056095] env[63379]: DEBUG oslo_vmware.api [None req-8ef4fd12-bb92-4260-b551-82621a44da83 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Waiting for the task: (returnval){ [ 1662.056095] env[63379]: value = "task-1779712" [ 1662.056095] env[63379]: _type = "Task" [ 1662.056095] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1662.065714] env[63379]: DEBUG oslo_vmware.api [None req-8ef4fd12-bb92-4260-b551-82621a44da83 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': task-1779712, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.176880] env[63379]: DEBUG nova.network.neutron [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Successfully created port: 1a119dbf-427b-4b34-819c-d65a9f0f88a8 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1662.193227] env[63379]: DEBUG oslo_vmware.api [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1779709, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.665817} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1662.193227] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 510db409-0b4c-494a-8084-39ef3cd6c918/510db409-0b4c-494a-8084-39ef3cd6c918.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1662.193227] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1662.193227] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0426dfb3-4a7a-474e-9376-575a6ff20772 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.198710] env[63379]: DEBUG oslo_vmware.api [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1662.198710] env[63379]: value = "task-1779713" [ 1662.198710] env[63379]: _type = "Task" [ 1662.198710] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1662.210488] env[63379]: DEBUG oslo_vmware.api [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1779713, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.262249] env[63379]: DEBUG nova.compute.manager [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1662.372371] env[63379]: DEBUG oslo_concurrency.lockutils [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1662.557233] env[63379]: INFO nova.compute.manager [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Rescuing [ 1662.557372] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Acquiring lock "refresh_cache-f087b3ac-13e2-4e55-a3ce-5e6bd3379239" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1662.557584] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Acquired lock "refresh_cache-f087b3ac-13e2-4e55-a3ce-5e6bd3379239" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1662.557685] env[63379]: DEBUG nova.network.neutron [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1662.570246] env[63379]: DEBUG oslo_vmware.api [None req-8ef4fd12-bb92-4260-b551-82621a44da83 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': task-1779712, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.645955] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9be7b4d-7c41-4481-934f-22952440fbf2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.655122] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4605c430-37e3-4d3d-9bac-df83080dc7ba {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.685655] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-168ad8e1-6673-455f-80f8-a58349ba3de3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.694791] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0a17cc4-d229-4c1a-9575-37d780017734 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.710241] env[63379]: DEBUG nova.compute.provider_tree [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1662.716099] env[63379]: DEBUG oslo_vmware.api [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1779713, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.072785] env[63379]: DEBUG oslo_vmware.api [None req-8ef4fd12-bb92-4260-b551-82621a44da83 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': task-1779712, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.812801} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1663.073484] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ef4fd12-bb92-4260-b551-82621a44da83 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1663.074577] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8ef4fd12-bb92-4260-b551-82621a44da83 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1663.074577] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8ef4fd12-bb92-4260-b551-82621a44da83 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1663.074577] env[63379]: INFO nova.compute.manager [None req-8ef4fd12-bb92-4260-b551-82621a44da83 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Took 1.78 seconds to destroy the instance on the hypervisor. [ 1663.074577] env[63379]: DEBUG oslo.service.loopingcall [None req-8ef4fd12-bb92-4260-b551-82621a44da83 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1663.074811] env[63379]: DEBUG nova.compute.manager [-] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1663.074903] env[63379]: DEBUG nova.network.neutron [-] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1663.212017] env[63379]: DEBUG oslo_vmware.api [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1779713, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.586859} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1663.213312] env[63379]: DEBUG nova.scheduler.client.report [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1663.217513] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1663.218567] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30e25950-0e56-4be1-b555-212320f299e0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.246531] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Reconfiguring VM instance instance-00000045 to attach disk [datastore1] 510db409-0b4c-494a-8084-39ef3cd6c918/510db409-0b4c-494a-8084-39ef3cd6c918.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1663.247559] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3306bc1c-c2fe-4f83-a5a9-284375b317fe {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.272008] env[63379]: DEBUG oslo_vmware.api [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1663.272008] env[63379]: value = "task-1779714" [ 1663.272008] env[63379]: _type = "Task" [ 1663.272008] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.277221] env[63379]: DEBUG nova.compute.manager [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1663.285865] env[63379]: DEBUG oslo_vmware.api [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1779714, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.307563] env[63379]: DEBUG nova.virt.hardware [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1663.307797] env[63379]: DEBUG nova.virt.hardware [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1663.308106] env[63379]: DEBUG nova.virt.hardware [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1663.308215] env[63379]: DEBUG nova.virt.hardware [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1663.308343] env[63379]: DEBUG nova.virt.hardware [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1663.308492] env[63379]: DEBUG nova.virt.hardware [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1663.308701] env[63379]: DEBUG nova.virt.hardware [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1663.308857] env[63379]: DEBUG nova.virt.hardware [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
1663.309167] env[63379]: DEBUG nova.virt.hardware [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1663.309375] env[63379]: DEBUG nova.virt.hardware [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1663.309546] env[63379]: DEBUG nova.virt.hardware [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1663.310445] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18b01dc9-3627-4569-92f8-f16967894a52 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.318215] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00ad22e2-0e9a-49b8-b5ac-b6609dfab1ea {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.505953] env[63379]: DEBUG oslo_concurrency.lockutils [None req-564d0e68-17df-4c5b-ab32-1283a9919c5d tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquiring lock "acc8aa2f-41a8-4f06-8227-a1bae9c93f44" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1663.506276] env[63379]: DEBUG oslo_concurrency.lockutils [None req-564d0e68-17df-4c5b-ab32-1283a9919c5d tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Lock "acc8aa2f-41a8-4f06-8227-a1bae9c93f44" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1663.506508] env[63379]: DEBUG oslo_concurrency.lockutils [None req-564d0e68-17df-4c5b-ab32-1283a9919c5d tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquiring lock "acc8aa2f-41a8-4f06-8227-a1bae9c93f44-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1663.506715] env[63379]: DEBUG oslo_concurrency.lockutils [None req-564d0e68-17df-4c5b-ab32-1283a9919c5d tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Lock "acc8aa2f-41a8-4f06-8227-a1bae9c93f44-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1663.506905] env[63379]: DEBUG oslo_concurrency.lockutils [None req-564d0e68-17df-4c5b-ab32-1283a9919c5d tempest-ServersAdminTestJSON-360986763 
tempest-ServersAdminTestJSON-360986763-project-member] Lock "acc8aa2f-41a8-4f06-8227-a1bae9c93f44-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1663.509074] env[63379]: INFO nova.compute.manager [None req-564d0e68-17df-4c5b-ab32-1283a9919c5d tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Terminating instance [ 1663.510810] env[63379]: DEBUG nova.compute.manager [None req-564d0e68-17df-4c5b-ab32-1283a9919c5d tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1663.511032] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-564d0e68-17df-4c5b-ab32-1283a9919c5d tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1663.511868] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40eccbf4-dc09-437a-b5ac-c1e9f758d5a1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.520828] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-564d0e68-17df-4c5b-ab32-1283a9919c5d tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1663.521236] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-04be94f3-f04f-4463-94b8-667819bef1d9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.531203] env[63379]: DEBUG oslo_vmware.api [None req-564d0e68-17df-4c5b-ab32-1283a9919c5d tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1663.531203] env[63379]: value = "task-1779715" [ 1663.531203] env[63379]: _type = "Task" [ 1663.531203] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.539823] env[63379]: DEBUG oslo_vmware.api [None req-564d0e68-17df-4c5b-ab32-1283a9919c5d tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779715, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.692292] env[63379]: DEBUG nova.network.neutron [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Updating instance_info_cache with network_info: [{"id": "ae27d114-783b-4d6d-89ea-22959da9b86f", "address": "fa:16:3e:2a:2d:02", "network": {"id": "f746cc0e-3c0e-4c9c-b2fc-2e87ec1838e1", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1847805430-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "b124411aac0544d6834ff8f5c2b84bd5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "748a5204-8f14-402c-9a6e-f3e6104db082", "external-id": "nsx-vlan-transportzone-750", "segmentation_id": 750, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae27d114-78", "ovs_interfaceid": "ae27d114-783b-4d6d-89ea-22959da9b86f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1663.723297] env[63379]: DEBUG oslo_concurrency.lockutils [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.476s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1663.724241] env[63379]: DEBUG nova.compute.manager [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1663.727469] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.961s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1663.728925] env[63379]: INFO nova.compute.claims [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] [instance: 266cc3d5-c10d-4367-a879-d170802495db] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1663.784297] env[63379]: DEBUG oslo_vmware.api [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1779714, 'name': ReconfigVM_Task, 'duration_secs': 0.296505} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1663.784841] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Reconfigured VM instance instance-00000045 to attach disk [datastore1] 510db409-0b4c-494a-8084-39ef3cd6c918/510db409-0b4c-494a-8084-39ef3cd6c918.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1663.785259] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-88d4ac27-7381-4188-beb3-af567b41906b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.792451] env[63379]: DEBUG oslo_vmware.api [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1663.792451] env[63379]: value = "task-1779716" [ 1663.792451] env[63379]: _type = "Task" [ 1663.792451] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.801656] env[63379]: DEBUG oslo_vmware.api [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1779716, 'name': Rename_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.896802] env[63379]: DEBUG nova.compute.manager [req-fb64c34b-782b-4cc9-b6b5-d60f2b524c93 req-57a8111f-e96e-47d0-816f-1b6f6474a15c service nova] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Received event network-vif-plugged-1a119dbf-427b-4b34-819c-d65a9f0f88a8 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1663.897119] env[63379]: DEBUG oslo_concurrency.lockutils [req-fb64c34b-782b-4cc9-b6b5-d60f2b524c93 req-57a8111f-e96e-47d0-816f-1b6f6474a15c service nova] Acquiring lock "c1858f41-75e7-4eee-a6db-493e150622ef-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1663.897363] env[63379]: DEBUG oslo_concurrency.lockutils [req-fb64c34b-782b-4cc9-b6b5-d60f2b524c93 req-57a8111f-e96e-47d0-816f-1b6f6474a15c service nova] Lock "c1858f41-75e7-4eee-a6db-493e150622ef-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1663.897561] env[63379]: DEBUG oslo_concurrency.lockutils [req-fb64c34b-782b-4cc9-b6b5-d60f2b524c93 req-57a8111f-e96e-47d0-816f-1b6f6474a15c service nova] Lock "c1858f41-75e7-4eee-a6db-493e150622ef-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1663.897744] env[63379]: DEBUG nova.compute.manager [req-fb64c34b-782b-4cc9-b6b5-d60f2b524c93 req-57a8111f-e96e-47d0-816f-1b6f6474a15c service nova] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] No waiting events found dispatching 
network-vif-plugged-1a119dbf-427b-4b34-819c-d65a9f0f88a8 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1663.897933] env[63379]: WARNING nova.compute.manager [req-fb64c34b-782b-4cc9-b6b5-d60f2b524c93 req-57a8111f-e96e-47d0-816f-1b6f6474a15c service nova] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Received unexpected event network-vif-plugged-1a119dbf-427b-4b34-819c-d65a9f0f88a8 for instance with vm_state building and task_state spawning. [ 1663.899216] env[63379]: DEBUG nova.network.neutron [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Successfully updated port: 1a119dbf-427b-4b34-819c-d65a9f0f88a8 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1663.962242] env[63379]: DEBUG nova.compute.manager [req-3c281933-9cc9-44f7-880b-d62a345fdea0 req-1cf0ab44-9d27-42ee-861f-5bae12a9f851 service nova] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Received event network-vif-deleted-eb04ffa3-5012-4114-8150-3bc9329f9328 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1663.962242] env[63379]: INFO nova.compute.manager [req-3c281933-9cc9-44f7-880b-d62a345fdea0 req-1cf0ab44-9d27-42ee-861f-5bae12a9f851 service nova] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Neutron deleted interface eb04ffa3-5012-4114-8150-3bc9329f9328; detaching it from the instance and deleting it from the info cache [ 1663.962436] env[63379]: DEBUG nova.network.neutron [req-3c281933-9cc9-44f7-880b-d62a345fdea0 req-1cf0ab44-9d27-42ee-861f-5bae12a9f851 service nova] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1664.041556] env[63379]: DEBUG oslo_vmware.api [None req-564d0e68-17df-4c5b-ab32-1283a9919c5d tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779715, 'name': PowerOffVM_Task} progress is 100%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.195494] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Releasing lock "refresh_cache-f087b3ac-13e2-4e55-a3ce-5e6bd3379239" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1664.239021] env[63379]: DEBUG nova.compute.utils [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1664.239812] env[63379]: DEBUG nova.compute.manager [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1664.240282] env[63379]: DEBUG nova.network.neutron [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1664.251835] env[63379]: DEBUG nova.network.neutron [-] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1664.285380] env[63379]: DEBUG nova.policy [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '991a93509b8943a693859488a56352b3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '928a9d102f0e45b897eae72fa566c0fe', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1664.303740] env[63379]: DEBUG oslo_vmware.api [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1779716, 'name': Rename_Task, 'duration_secs': 0.145095} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1664.304607] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1664.304718] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3965e94e-5225-44fd-9c01-d8151d8417bd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.312501] env[63379]: DEBUG oslo_vmware.api [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1664.312501] env[63379]: value = "task-1779717" [ 1664.312501] env[63379]: _type = "Task" [ 1664.312501] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1664.323525] env[63379]: DEBUG oslo_vmware.api [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1779717, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.403214] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "refresh_cache-c1858f41-75e7-4eee-a6db-493e150622ef" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1664.403468] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquired lock "refresh_cache-c1858f41-75e7-4eee-a6db-493e150622ef" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1664.403679] env[63379]: DEBUG nova.network.neutron [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1664.465793] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-055a4d4f-1c2d-4d3a-89a5-fd61abbe0245 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.478430] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5e180f2-71ba-4524-a001-7a7dbb35ce51 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.519655] env[63379]: DEBUG nova.compute.manager [req-3c281933-9cc9-44f7-880b-d62a345fdea0 req-1cf0ab44-9d27-42ee-861f-5bae12a9f851 service nova] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Detach interface failed, port_id=eb04ffa3-5012-4114-8150-3bc9329f9328, reason: Instance 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 1664.541143] env[63379]: DEBUG oslo_vmware.api [None req-564d0e68-17df-4c5b-ab32-1283a9919c5d tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779715, 'name': PowerOffVM_Task, 'duration_secs': 0.53182} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1664.541644] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-564d0e68-17df-4c5b-ab32-1283a9919c5d tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1664.541910] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-564d0e68-17df-4c5b-ab32-1283a9919c5d tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1664.542417] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ef88eff0-f9c1-4818-9ab7-4c7d26c4b649 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.624903] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-564d0e68-17df-4c5b-ab32-1283a9919c5d tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1664.624903] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-564d0e68-17df-4c5b-ab32-1283a9919c5d tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1664.624903] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-564d0e68-17df-4c5b-ab32-1283a9919c5d tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Deleting the datastore file [datastore1] acc8aa2f-41a8-4f06-8227-a1bae9c93f44 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1664.625346] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ecb1a1a7-9722-4ee4-a0c5-74463ed596b9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.633097] env[63379]: DEBUG oslo_vmware.api [None req-564d0e68-17df-4c5b-ab32-1283a9919c5d tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1664.633097] env[63379]: value = "task-1779719" [ 1664.633097] env[63379]: _type = "Task" [ 1664.633097] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1664.641897] env[63379]: DEBUG oslo_vmware.api [None req-564d0e68-17df-4c5b-ab32-1283a9919c5d tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779719, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.736331] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1664.736773] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6a4dc097-b684-4670-be22-e807b1c96b6e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.741150] env[63379]: DEBUG nova.compute.manager [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1664.751300] env[63379]: DEBUG oslo_vmware.api [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Waiting for the task: (returnval){ [ 1664.751300] env[63379]: value = "task-1779720" [ 1664.751300] env[63379]: _type = "Task" [ 1664.751300] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1664.756689] env[63379]: INFO nova.compute.manager [-] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Took 1.68 seconds to deallocate network for instance. [ 1664.770955] env[63379]: DEBUG oslo_vmware.api [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Task: {'id': task-1779720, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.827169] env[63379]: DEBUG oslo_vmware.api [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1779717, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.874497] env[63379]: DEBUG nova.network.neutron [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Successfully created port: 9ab93b8c-54a2-4b4a-aaa1-4c931e56286d {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1664.976690] env[63379]: DEBUG nova.network.neutron [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Instance cache missing network info. 
{{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1665.150206] env[63379]: DEBUG oslo_vmware.api [None req-564d0e68-17df-4c5b-ab32-1283a9919c5d tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779719, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.314821} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1665.150489] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-564d0e68-17df-4c5b-ab32-1283a9919c5d tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1665.150684] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-564d0e68-17df-4c5b-ab32-1283a9919c5d tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1665.150872] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-564d0e68-17df-4c5b-ab32-1283a9919c5d tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1665.151118] env[63379]: INFO nova.compute.manager [None req-564d0e68-17df-4c5b-ab32-1283a9919c5d tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Took 1.64 seconds to destroy the instance on the hypervisor. [ 1665.151313] env[63379]: DEBUG oslo.service.loopingcall [None req-564d0e68-17df-4c5b-ab32-1283a9919c5d tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1665.151530] env[63379]: DEBUG nova.compute.manager [-] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1665.151600] env[63379]: DEBUG nova.network.neutron [-] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1665.220463] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ca4fb90-5e3f-49dc-b5ae-6feaff91fbe2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.230156] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d1ae09d-1d13-4a00-8e84-ec907dd7a13f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.276690] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8ef4fd12-bb92-4260-b551-82621a44da83 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1665.283012] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f93e41d-25f2-4176-a0a8-f58cb7316f06 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.289675] env[63379]: DEBUG oslo_vmware.api [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Task: {'id': task-1779720, 'name': PowerOffVM_Task, 'duration_secs': 0.306135} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1665.293078] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1665.293932] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15213261-c823-4ad7-aed5-259129803681 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.299371] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9e5cce4-e6c6-44a4-ab21-99b041e8fcce {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.313802] env[63379]: DEBUG nova.compute.provider_tree [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1665.329539] env[63379]: DEBUG nova.network.neutron [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Updating instance_info_cache with network_info: [{"id": "1a119dbf-427b-4b34-819c-d65a9f0f88a8", "address": "fa:16:3e:c9:b9:0a", "network": {"id": "a2c9b802-041e-4679-bfb1-118fd9cd10f3", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-986609966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f28f4532d464e6eb90ab75799990c85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a119dbf-42", "ovs_interfaceid": "1a119dbf-427b-4b34-819c-d65a9f0f88a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1665.335117] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fdaf069-24f4-4808-9282-bd957a2ab88a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.349622] env[63379]: DEBUG oslo_vmware.api [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1779717, 'name': PowerOnVM_Task, 'duration_secs': 0.666464} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1665.349901] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1665.350123] env[63379]: INFO nova.compute.manager [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Took 8.65 seconds to spawn the instance on the hypervisor. [ 1665.350313] env[63379]: DEBUG nova.compute.manager [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1665.351097] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91f92479-db06-49be-b59d-bdca203e7119 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.383884] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1665.383884] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d04d9646-069b-4c5b-9244-e747402856d4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.392029] env[63379]: DEBUG oslo_vmware.api [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Waiting for the task: (returnval){ [ 1665.392029] env[63379]: value = "task-1779721" [ 1665.392029] env[63379]: _type = "Task" [ 1665.392029] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1665.398859] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] VM already powered off {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1665.398859] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1665.399111] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1665.399283] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1665.399477] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1665.401460] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6af44f88-5190-469b-bc47-ee6365adb869 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.418179] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1665.418337] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1665.419268] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb20b1a1-40db-4d33-a09c-215554562317 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.424783] env[63379]: DEBUG oslo_vmware.api [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Waiting for the task: (returnval){ [ 1665.424783] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5296bce0-4a5b-1386-c974-cc1a31d197bf" [ 1665.424783] env[63379]: _type = "Task" [ 1665.424783] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1665.432419] env[63379]: DEBUG oslo_vmware.api [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5296bce0-4a5b-1386-c974-cc1a31d197bf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.785847] env[63379]: DEBUG nova.compute.manager [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1665.826015] env[63379]: DEBUG nova.virt.hardware [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1665.826778] env[63379]: DEBUG nova.virt.hardware [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1665.827125] env[63379]: DEBUG nova.virt.hardware [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1665.827457] env[63379]: DEBUG nova.virt.hardware [None req-21a7a79a-8389-48dc-a407-710a09a89064 
tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1665.827698] env[63379]: DEBUG nova.virt.hardware [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1665.827916] env[63379]: DEBUG nova.virt.hardware [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1665.828227] env[63379]: DEBUG nova.virt.hardware [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1665.828544] env[63379]: DEBUG nova.virt.hardware [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1665.828811] env[63379]: DEBUG nova.virt.hardware [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1665.829551] env[63379]: DEBUG nova.virt.hardware [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1665.830508] env[63379]: DEBUG nova.virt.hardware [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1665.831512] env[63379]: DEBUG nova.scheduler.client.report [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1665.836119] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d26eb0b-7e59-49af-8f40-fa159999064f 
{{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.842090] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Releasing lock "refresh_cache-c1858f41-75e7-4eee-a6db-493e150622ef" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1665.842090] env[63379]: DEBUG nova.compute.manager [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Instance network_info: |[{"id": "1a119dbf-427b-4b34-819c-d65a9f0f88a8", "address": "fa:16:3e:c9:b9:0a", "network": {"id": "a2c9b802-041e-4679-bfb1-118fd9cd10f3", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-986609966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f28f4532d464e6eb90ab75799990c85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a119dbf-42", "ovs_interfaceid": "1a119dbf-427b-4b34-819c-d65a9f0f88a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1665.842090] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c9:b9:0a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8f441782-e89c-4815-b53e-af83c5d27902', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1a119dbf-427b-4b34-819c-d65a9f0f88a8', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1665.848487] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Creating folder: Project (0f28f4532d464e6eb90ab75799990c85). Parent ref: group-v369214. 
{{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1665.849396] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5d9b50d4-1f37-4700-9de3-31efe9339e49 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.858979] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5c0bf43-01f5-4aee-8844-c679c03acd5c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.870125] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Created folder: Project (0f28f4532d464e6eb90ab75799990c85) in parent group-v369214. [ 1665.870378] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Creating folder: Instances. Parent ref: group-v369413. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1665.872266] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cb257262-fb9d-4467-bc26-3498fd0808ba {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.882430] env[63379]: INFO nova.compute.manager [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Took 40.00 seconds to build instance. [ 1665.892420] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Created folder: Instances in parent group-v369413. [ 1665.892682] env[63379]: DEBUG oslo.service.loopingcall [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1665.892886] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1665.893121] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fd6d7b21-22d1-45f7-98b5-eae96974e857 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.914702] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1665.914702] env[63379]: value = "task-1779724" [ 1665.914702] env[63379]: _type = "Task" [ 1665.914702] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1665.921355] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779724, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.937719] env[63379]: DEBUG oslo_vmware.api [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5296bce0-4a5b-1386-c974-cc1a31d197bf, 'name': SearchDatastore_Task, 'duration_secs': 0.024283} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1665.939159] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6ada443-48e6-42fa-a7d8-60bddd7577bb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.949041] env[63379]: DEBUG oslo_vmware.api [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Waiting for the task: (returnval){ [ 1665.949041] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5231188a-2d90-ebb6-2c82-7dcc7abefac8" [ 1665.949041] env[63379]: _type = "Task" [ 1665.949041] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1665.961163] env[63379]: DEBUG oslo_vmware.api [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5231188a-2d90-ebb6-2c82-7dcc7abefac8, 'name': SearchDatastore_Task, 'duration_secs': 0.010781} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1665.961163] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1665.961163] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] f087b3ac-13e2-4e55-a3ce-5e6bd3379239/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48-rescue.vmdk. {{(pid=63379) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1665.961163] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d85a0da1-87d9-4b67-9346-d86c9cba947c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.967300] env[63379]: DEBUG oslo_vmware.api [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Waiting for the task: (returnval){ [ 1665.967300] env[63379]: value = "task-1779725" [ 1665.967300] env[63379]: _type = "Task" [ 1665.967300] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1665.977111] env[63379]: DEBUG oslo_vmware.api [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Task: {'id': task-1779725, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1666.085470] env[63379]: DEBUG nova.network.neutron [-] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1666.212760] env[63379]: DEBUG nova.compute.manager [req-68c142de-71ca-4f8d-9490-d6504c6c81ea req-84b54205-cbdf-4807-b210-757f53ca1a3d service nova] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Received event network-vif-deleted-af03ac28-a066-4ffd-ac52-33d4596db87d {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1666.340325] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.613s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1666.341198] env[63379]: DEBUG nova.compute.manager [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] [instance: 266cc3d5-c10d-4367-a879-d170802495db] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1666.344041] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.206s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1666.345766] env[63379]: INFO nova.compute.claims [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1666.385656] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2aa95f0d-0bae-4aba-a852-b1c224b752bb tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "510db409-0b4c-494a-8084-39ef3cd6c918" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.513s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1666.425246] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779724, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1666.434277] env[63379]: DEBUG nova.compute.manager [req-b8bce153-f576-423d-a72d-5ab94197a65a req-47b59dae-7370-4acb-95f4-696f52b3c1d2 service nova] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Received event network-changed-1a119dbf-427b-4b34-819c-d65a9f0f88a8 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1666.434485] env[63379]: DEBUG nova.compute.manager [req-b8bce153-f576-423d-a72d-5ab94197a65a req-47b59dae-7370-4acb-95f4-696f52b3c1d2 service nova] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Refreshing instance network info cache due to event network-changed-1a119dbf-427b-4b34-819c-d65a9f0f88a8. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1666.434711] env[63379]: DEBUG oslo_concurrency.lockutils [req-b8bce153-f576-423d-a72d-5ab94197a65a req-47b59dae-7370-4acb-95f4-696f52b3c1d2 service nova] Acquiring lock "refresh_cache-c1858f41-75e7-4eee-a6db-493e150622ef" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1666.434858] env[63379]: DEBUG oslo_concurrency.lockutils [req-b8bce153-f576-423d-a72d-5ab94197a65a req-47b59dae-7370-4acb-95f4-696f52b3c1d2 service nova] Acquired lock "refresh_cache-c1858f41-75e7-4eee-a6db-493e150622ef" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1666.435107] env[63379]: DEBUG nova.network.neutron [req-b8bce153-f576-423d-a72d-5ab94197a65a req-47b59dae-7370-4acb-95f4-696f52b3c1d2 service nova] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Refreshing network info cache for port 1a119dbf-427b-4b34-819c-d65a9f0f88a8 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1666.484570] env[63379]: DEBUG oslo_vmware.api [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Task: {'id': task-1779725, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1666.503843] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "a7cce485-7476-4ea1-b127-68d879e164cd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1666.504181] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "a7cce485-7476-4ea1-b127-68d879e164cd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1666.588267] env[63379]: INFO nova.compute.manager [-] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Took 1.44 seconds to deallocate network for instance. 
[ 1666.850703] env[63379]: DEBUG nova.compute.utils [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1666.857035] env[63379]: DEBUG nova.compute.manager [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] [instance: 266cc3d5-c10d-4367-a879-d170802495db] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1666.857035] env[63379]: DEBUG nova.network.neutron [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] [instance: 266cc3d5-c10d-4367-a879-d170802495db] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1666.884594] env[63379]: DEBUG nova.network.neutron [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Successfully updated port: 9ab93b8c-54a2-4b4a-aaa1-4c931e56286d {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1666.918110] env[63379]: DEBUG nova.policy [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c47bf588cb534238a8079bade73b55ad', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '16d0a53eb7ad4b349a5eab251e059dfa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1666.928190] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779724, 'name': CreateVM_Task, 'duration_secs': 0.720882} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1666.928634] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1666.929630] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1666.929978] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1666.930499] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1666.931021] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93b2fe04-ccb6-4a29-8da7-de252ce19c54 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.936126] env[63379]: DEBUG oslo_vmware.api [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1666.936126] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52204309-1f44-414e-b928-aaf97bc21f2e" [ 1666.936126] env[63379]: _type = "Task" [ 1666.936126] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1666.951831] env[63379]: DEBUG oslo_vmware.api [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52204309-1f44-414e-b928-aaf97bc21f2e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1666.979661] env[63379]: DEBUG oslo_vmware.api [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Task: {'id': task-1779725, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.601692} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1666.980248] env[63379]: INFO nova.virt.vmwareapi.ds_util [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] f087b3ac-13e2-4e55-a3ce-5e6bd3379239/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48-rescue.vmdk. [ 1666.981509] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-983e4fda-d6d8-44b9-ac7c-80baf44502cd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.012427] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] f087b3ac-13e2-4e55-a3ce-5e6bd3379239/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48-rescue.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1667.015820] env[63379]: DEBUG nova.compute.manager [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1667.020458] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fbc17e53-018d-434f-97c6-69cf1895a91e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.038669] env[63379]: DEBUG oslo_vmware.api [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Waiting for the task: (returnval){ [ 1667.038669] env[63379]: value = "task-1779726" [ 1667.038669] env[63379]: _type = "Task" [ 1667.038669] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1667.049905] env[63379]: DEBUG oslo_vmware.api [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Task: {'id': task-1779726, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.096179] env[63379]: DEBUG oslo_concurrency.lockutils [None req-564d0e68-17df-4c5b-ab32-1283a9919c5d tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1667.252652] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Acquiring lock "861cda26-f938-4b2e-ba3d-56b8469b6034" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1667.252652] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Lock "861cda26-f938-4b2e-ba3d-56b8469b6034" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1667.271534] env[63379]: DEBUG nova.network.neutron [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] [instance: 266cc3d5-c10d-4367-a879-d170802495db] Successfully created port: 96c2a882-fdd3-4e25-92d0-a68dd0bcb811 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1667.357108] env[63379]: DEBUG nova.compute.manager [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] [instance: 266cc3d5-c10d-4367-a879-d170802495db] Start building block device mappings for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1667.389259] env[63379]: DEBUG oslo_concurrency.lockutils [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "refresh_cache-fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1667.389259] env[63379]: DEBUG oslo_concurrency.lockutils [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquired lock "refresh_cache-fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1667.389259] env[63379]: DEBUG nova.network.neutron [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1667.408206] env[63379]: DEBUG nova.network.neutron [req-b8bce153-f576-423d-a72d-5ab94197a65a req-47b59dae-7370-4acb-95f4-696f52b3c1d2 service nova] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Updated VIF entry in instance network info cache for port 1a119dbf-427b-4b34-819c-d65a9f0f88a8. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1667.408547] env[63379]: DEBUG nova.network.neutron [req-b8bce153-f576-423d-a72d-5ab94197a65a req-47b59dae-7370-4acb-95f4-696f52b3c1d2 service nova] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Updating instance_info_cache with network_info: [{"id": "1a119dbf-427b-4b34-819c-d65a9f0f88a8", "address": "fa:16:3e:c9:b9:0a", "network": {"id": "a2c9b802-041e-4679-bfb1-118fd9cd10f3", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-986609966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f28f4532d464e6eb90ab75799990c85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a119dbf-42", "ovs_interfaceid": "1a119dbf-427b-4b34-819c-d65a9f0f88a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1667.448383] env[63379]: DEBUG oslo_vmware.api [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52204309-1f44-414e-b928-aaf97bc21f2e, 'name': SearchDatastore_Task, 'duration_secs': 0.025664} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1667.448706] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1667.448949] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1667.449211] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1667.449368] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1667.449561] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1667.449830] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5b742cfe-19c3-47f6-a327-672f0a995a0d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.458539] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1667.458722] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1667.459490] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f84b2358-4307-4fb6-8b66-6140b46a2133 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.467010] env[63379]: DEBUG oslo_vmware.api [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1667.467010] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]527ee43c-094b-ab81-08aa-194642caf759" [ 1667.467010] env[63379]: _type = "Task" [ 1667.467010] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1667.474531] env[63379]: DEBUG oslo_vmware.api [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]527ee43c-094b-ab81-08aa-194642caf759, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.552074] env[63379]: DEBUG oslo_vmware.api [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Task: {'id': task-1779726, 'name': ReconfigVM_Task, 'duration_secs': 0.302986} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1667.557168] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Reconfigured VM instance instance-00000044 to attach disk [datastore1] f087b3ac-13e2-4e55-a3ce-5e6bd3379239/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48-rescue.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1667.558646] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a4bc6a0-daf8-40cc-b3f6-be502a1d9ef2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.562177] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1667.592618] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-780ed2aa-d7e4-4f3a-b499-a86ea4009f6e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.609417] env[63379]: DEBUG oslo_vmware.api [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Waiting for the task: (returnval){ [ 1667.609417] env[63379]: value = "task-1779727" [ 
1667.609417] env[63379]: _type = "Task" [ 1667.609417] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1667.619404] env[63379]: DEBUG oslo_vmware.api [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Task: {'id': task-1779727, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.755019] env[63379]: DEBUG nova.compute.manager [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1667.870797] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ada6eed7-2e4f-4279-8a21-f47d59c6eb6a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.881021] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b93146a1-2d2b-4d34-91f2-3cce897adb0c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.913884] env[63379]: DEBUG oslo_concurrency.lockutils [req-b8bce153-f576-423d-a72d-5ab94197a65a req-47b59dae-7370-4acb-95f4-696f52b3c1d2 service nova] Releasing lock "refresh_cache-c1858f41-75e7-4eee-a6db-493e150622ef" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1667.915750] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c82626e-32d9-46d3-b354-5dc24d81c818 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.926180] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58f76e15-acc5-4a3b-a0c6-ad22e0519557 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.944892] env[63379]: DEBUG nova.compute.provider_tree [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1667.947578] env[63379]: DEBUG nova.network.neutron [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1667.979660] env[63379]: DEBUG oslo_vmware.api [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]527ee43c-094b-ab81-08aa-194642caf759, 'name': SearchDatastore_Task, 'duration_secs': 0.011665} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1667.981259] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1fcf4a4c-943b-4268-a39f-b0aa49fbf935 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.986263] env[63379]: DEBUG oslo_vmware.api [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1667.986263] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52699909-1541-0a08-1be0-9e2fa2456e81" [ 1667.986263] env[63379]: _type = "Task" [ 1667.986263] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1667.995357] env[63379]: DEBUG oslo_vmware.api [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52699909-1541-0a08-1be0-9e2fa2456e81, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.120087] env[63379]: DEBUG oslo_vmware.api [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Task: {'id': task-1779727, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.122282] env[63379]: DEBUG nova.network.neutron [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Updating instance_info_cache with network_info: [{"id": "9ab93b8c-54a2-4b4a-aaa1-4c931e56286d", "address": "fa:16:3e:46:8c:42", "network": {"id": "f43cdd88-dc3a-4cc6-af5d-da244f472d78", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-715557899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "928a9d102f0e45b897eae72fa566c0fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23fc30ea-1f06-424d-86e1-27ae5435b1a9", "external-id": "nsx-vlan-transportzone-189", "segmentation_id": 189, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ab93b8c-54", "ovs_interfaceid": "9ab93b8c-54a2-4b4a-aaa1-4c931e56286d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1668.275828] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Acquiring lock 
"compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1668.375380] env[63379]: DEBUG nova.compute.manager [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] [instance: 266cc3d5-c10d-4367-a879-d170802495db] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1668.401902] env[63379]: DEBUG nova.virt.hardware [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1668.402262] env[63379]: DEBUG nova.virt.hardware [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1668.402499] env[63379]: DEBUG nova.virt.hardware [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1668.402778] env[63379]: DEBUG nova.virt.hardware [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1668.403020] env[63379]: DEBUG nova.virt.hardware [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1668.403257] env[63379]: DEBUG nova.virt.hardware [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1668.403566] env[63379]: DEBUG nova.virt.hardware [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1668.403818] env[63379]: DEBUG nova.virt.hardware [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1668.404118] env[63379]: DEBUG nova.virt.hardware [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1668.404411] env[63379]: DEBUG nova.virt.hardware [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1668.404642] env[63379]: DEBUG nova.virt.hardware [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1668.405700] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f7773f6-df97-4748-a78a-e8a9487e5c8c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.414134] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a93181d-9493-4911-8c47-68e996963d83 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.452207] env[63379]: DEBUG nova.scheduler.client.report [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1668.499513] env[63379]: DEBUG oslo_vmware.api [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52699909-1541-0a08-1be0-9e2fa2456e81, 'name': SearchDatastore_Task, 'duration_secs': 0.009808} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1668.499826] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1668.500219] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] c1858f41-75e7-4eee-a6db-493e150622ef/c1858f41-75e7-4eee-a6db-493e150622ef.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1668.500378] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-839b6367-bc9f-410f-9688-1f90e85202b2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.508211] env[63379]: DEBUG oslo_vmware.api [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1668.508211] env[63379]: value = "task-1779728" [ 1668.508211] env[63379]: _type = "Task" [ 1668.508211] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1668.516603] env[63379]: DEBUG oslo_vmware.api [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779728, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.554826] env[63379]: DEBUG nova.compute.manager [req-feecf124-a76c-41a0-9798-d84c70060a68 req-0fa54ac8-c070-413d-ad7a-a0b209018775 service nova] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Received event network-vif-plugged-9ab93b8c-54a2-4b4a-aaa1-4c931e56286d {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1668.555076] env[63379]: DEBUG oslo_concurrency.lockutils [req-feecf124-a76c-41a0-9798-d84c70060a68 req-0fa54ac8-c070-413d-ad7a-a0b209018775 service nova] Acquiring lock "fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1668.555297] env[63379]: DEBUG oslo_concurrency.lockutils [req-feecf124-a76c-41a0-9798-d84c70060a68 req-0fa54ac8-c070-413d-ad7a-a0b209018775 service nova] Lock "fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1668.555487] env[63379]: DEBUG oslo_concurrency.lockutils [req-feecf124-a76c-41a0-9798-d84c70060a68 req-0fa54ac8-c070-413d-ad7a-a0b209018775 service nova] Lock "fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1668.555668] env[63379]: DEBUG nova.compute.manager [req-feecf124-a76c-41a0-9798-d84c70060a68 req-0fa54ac8-c070-413d-ad7a-a0b209018775 service nova] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] No waiting events found dispatching network-vif-plugged-9ab93b8c-54a2-4b4a-aaa1-4c931e56286d {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1668.555842] env[63379]: WARNING nova.compute.manager [req-feecf124-a76c-41a0-9798-d84c70060a68 req-0fa54ac8-c070-413d-ad7a-a0b209018775 service nova] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Received unexpected event network-vif-plugged-9ab93b8c-54a2-4b4a-aaa1-4c931e56286d for instance with vm_state building and task_state spawning. [ 1668.556073] env[63379]: DEBUG nova.compute.manager [req-feecf124-a76c-41a0-9798-d84c70060a68 req-0fa54ac8-c070-413d-ad7a-a0b209018775 service nova] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Received event network-changed-9ab93b8c-54a2-4b4a-aaa1-4c931e56286d {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1668.556258] env[63379]: DEBUG nova.compute.manager [req-feecf124-a76c-41a0-9798-d84c70060a68 req-0fa54ac8-c070-413d-ad7a-a0b209018775 service nova] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Refreshing instance network info cache due to event network-changed-9ab93b8c-54a2-4b4a-aaa1-4c931e56286d. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1668.556434] env[63379]: DEBUG oslo_concurrency.lockutils [req-feecf124-a76c-41a0-9798-d84c70060a68 req-0fa54ac8-c070-413d-ad7a-a0b209018775 service nova] Acquiring lock "refresh_cache-fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1668.621342] env[63379]: DEBUG oslo_vmware.api [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Task: {'id': task-1779727, 'name': ReconfigVM_Task, 'duration_secs': 0.851735} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1668.621646] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1668.621901] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b00ad692-6068-4112-b9ae-92763f02877d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.625080] env[63379]: DEBUG oslo_concurrency.lockutils [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Releasing lock "refresh_cache-fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1668.625378] env[63379]: DEBUG nova.compute.manager [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Instance network_info: |[{"id": "9ab93b8c-54a2-4b4a-aaa1-4c931e56286d", "address": "fa:16:3e:46:8c:42", "network": {"id": "f43cdd88-dc3a-4cc6-af5d-da244f472d78", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-715557899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "928a9d102f0e45b897eae72fa566c0fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23fc30ea-1f06-424d-86e1-27ae5435b1a9", "external-id": "nsx-vlan-transportzone-189", "segmentation_id": 189, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ab93b8c-54", "ovs_interfaceid": "9ab93b8c-54a2-4b4a-aaa1-4c931e56286d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1668.625651] env[63379]: DEBUG oslo_concurrency.lockutils [req-feecf124-a76c-41a0-9798-d84c70060a68 req-0fa54ac8-c070-413d-ad7a-a0b209018775 service nova] Acquired lock 
"refresh_cache-fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1668.625829] env[63379]: DEBUG nova.network.neutron [req-feecf124-a76c-41a0-9798-d84c70060a68 req-0fa54ac8-c070-413d-ad7a-a0b209018775 service nova] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Refreshing network info cache for port 9ab93b8c-54a2-4b4a-aaa1-4c931e56286d {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1668.627157] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:46:8c:42', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '23fc30ea-1f06-424d-86e1-27ae5435b1a9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9ab93b8c-54a2-4b4a-aaa1-4c931e56286d', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1668.634893] env[63379]: DEBUG oslo.service.loopingcall [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1668.636904] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1668.637244] env[63379]: DEBUG oslo_vmware.api [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Waiting for the task: (returnval){ [ 1668.637244] env[63379]: value = "task-1779729" [ 1668.637244] env[63379]: _type = "Task" [ 1668.637244] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1668.637447] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-01ab584d-1e0e-413d-b349-29f6829b02a1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.663597] env[63379]: DEBUG oslo_vmware.api [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Task: {'id': task-1779729, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.665352] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1668.665352] env[63379]: value = "task-1779730" [ 1668.665352] env[63379]: _type = "Task" [ 1668.665352] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1668.678636] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779730, 'name': CreateVM_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.867808] env[63379]: DEBUG nova.compute.manager [req-88ae1ecc-cbbd-4cfe-85c6-cc641acf34ac req-fec693d6-2ddf-4db4-945b-4e36946cecb9 service nova] [instance: 266cc3d5-c10d-4367-a879-d170802495db] Received event network-vif-plugged-96c2a882-fdd3-4e25-92d0-a68dd0bcb811 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1668.868129] env[63379]: DEBUG oslo_concurrency.lockutils [req-88ae1ecc-cbbd-4cfe-85c6-cc641acf34ac req-fec693d6-2ddf-4db4-945b-4e36946cecb9 service nova] Acquiring lock "266cc3d5-c10d-4367-a879-d170802495db-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1668.868422] env[63379]: DEBUG oslo_concurrency.lockutils [req-88ae1ecc-cbbd-4cfe-85c6-cc641acf34ac req-fec693d6-2ddf-4db4-945b-4e36946cecb9 service nova] Lock "266cc3d5-c10d-4367-a879-d170802495db-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1668.868652] env[63379]: DEBUG oslo_concurrency.lockutils [req-88ae1ecc-cbbd-4cfe-85c6-cc641acf34ac req-fec693d6-2ddf-4db4-945b-4e36946cecb9 service nova] Lock "266cc3d5-c10d-4367-a879-d170802495db-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1668.868840] env[63379]: DEBUG nova.compute.manager [req-88ae1ecc-cbbd-4cfe-85c6-cc641acf34ac req-fec693d6-2ddf-4db4-945b-4e36946cecb9 service nova] [instance: 266cc3d5-c10d-4367-a879-d170802495db] No waiting events found dispatching network-vif-plugged-96c2a882-fdd3-4e25-92d0-a68dd0bcb811 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1668.869264] env[63379]: WARNING nova.compute.manager [req-88ae1ecc-cbbd-4cfe-85c6-cc641acf34ac req-fec693d6-2ddf-4db4-945b-4e36946cecb9 service nova] [instance: 266cc3d5-c10d-4367-a879-d170802495db] Received unexpected event network-vif-plugged-96c2a882-fdd3-4e25-92d0-a68dd0bcb811 for instance with vm_state building and task_state spawning. [ 1668.962988] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.619s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1668.963662] env[63379]: DEBUG nova.compute.manager [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1668.968972] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.439s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1668.974952] env[63379]: INFO nova.compute.claims [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1668.988372] env[63379]: DEBUG nova.network.neutron [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] [instance: 266cc3d5-c10d-4367-a879-d170802495db] Successfully updated port: 96c2a882-fdd3-4e25-92d0-a68dd0bcb811 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1669.025682] env[63379]: DEBUG oslo_vmware.api [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779728, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1669.162823] env[63379]: DEBUG oslo_vmware.api [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Task: {'id': task-1779729, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1669.179281] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779730, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1669.368866] env[63379]: DEBUG nova.network.neutron [req-feecf124-a76c-41a0-9798-d84c70060a68 req-0fa54ac8-c070-413d-ad7a-a0b209018775 service nova] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Updated VIF entry in instance network info cache for port 9ab93b8c-54a2-4b4a-aaa1-4c931e56286d. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1669.369143] env[63379]: DEBUG nova.network.neutron [req-feecf124-a76c-41a0-9798-d84c70060a68 req-0fa54ac8-c070-413d-ad7a-a0b209018775 service nova] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Updating instance_info_cache with network_info: [{"id": "9ab93b8c-54a2-4b4a-aaa1-4c931e56286d", "address": "fa:16:3e:46:8c:42", "network": {"id": "f43cdd88-dc3a-4cc6-af5d-da244f472d78", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-715557899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "928a9d102f0e45b897eae72fa566c0fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23fc30ea-1f06-424d-86e1-27ae5435b1a9", "external-id": "nsx-vlan-transportzone-189", "segmentation_id": 189, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ab93b8c-54", "ovs_interfaceid": "9ab93b8c-54a2-4b4a-aaa1-4c931e56286d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1669.480489] env[63379]: DEBUG nova.compute.utils [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1669.484257] env[63379]: DEBUG nova.compute.manager [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1669.484400] env[63379]: DEBUG nova.network.neutron [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1669.492434] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Acquiring lock "refresh_cache-266cc3d5-c10d-4367-a879-d170802495db" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1669.492552] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Acquired lock "refresh_cache-266cc3d5-c10d-4367-a879-d170802495db" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1669.492624] env[63379]: DEBUG nova.network.neutron [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] [instance: 266cc3d5-c10d-4367-a879-d170802495db] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1669.518972] env[63379]: DEBUG oslo_vmware.api [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779728, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.620534} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1669.519343] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] c1858f41-75e7-4eee-a6db-493e150622ef/c1858f41-75e7-4eee-a6db-493e150622ef.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1669.519580] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1669.519846] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9f81dd83-5773-401e-8980-65bd7b30f911 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.523463] env[63379]: DEBUG nova.policy [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '06eb1221300e4d969a2c7fc92d8dc3e2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c3562bb229474ba7aa3dae98def05260', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1669.526241] env[63379]: DEBUG oslo_vmware.api [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1669.526241] env[63379]: value = "task-1779731" [ 1669.526241] env[63379]: _type = "Task" [ 1669.526241] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1669.534520] env[63379]: DEBUG oslo_vmware.api [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779731, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1669.663777] env[63379]: DEBUG oslo_vmware.api [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Task: {'id': task-1779729, 'name': PowerOnVM_Task, 'duration_secs': 0.798959} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1669.665413] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1669.667498] env[63379]: DEBUG nova.compute.manager [None req-e67b3d23-47b5-4f0b-a384-a500a1ee9efc tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1669.668377] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f2f5ab8-1024-48bc-9beb-44601771c49e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.683909] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779730, 'name': CreateVM_Task, 'duration_secs': 0.734109} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1669.684383] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1669.685090] env[63379]: DEBUG oslo_concurrency.lockutils [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1669.685271] env[63379]: DEBUG oslo_concurrency.lockutils [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1669.685604] env[63379]: DEBUG oslo_concurrency.lockutils [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1669.685869] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64868df5-c973-4d2a-b047-3013b9502cb0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.690800] env[63379]: DEBUG oslo_vmware.api [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1669.690800] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52126eca-c305-a6b3-6794-608ad58ba3cc" [ 1669.690800] env[63379]: _type = "Task" [ 1669.690800] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1669.698874] env[63379]: DEBUG oslo_vmware.api [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52126eca-c305-a6b3-6794-608ad58ba3cc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1669.873597] env[63379]: DEBUG oslo_concurrency.lockutils [req-feecf124-a76c-41a0-9798-d84c70060a68 req-0fa54ac8-c070-413d-ad7a-a0b209018775 service nova] Releasing lock "refresh_cache-fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1669.874488] env[63379]: DEBUG nova.compute.manager [req-feecf124-a76c-41a0-9798-d84c70060a68 req-0fa54ac8-c070-413d-ad7a-a0b209018775 service nova] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Received event network-changed-6cdabd2b-f665-46a9-a86e-2527cfe452bf {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1669.874488] env[63379]: DEBUG nova.compute.manager [req-feecf124-a76c-41a0-9798-d84c70060a68 req-0fa54ac8-c070-413d-ad7a-a0b209018775 service nova] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Refreshing instance network info cache due to event network-changed-6cdabd2b-f665-46a9-a86e-2527cfe452bf. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1669.874488] env[63379]: DEBUG oslo_concurrency.lockutils [req-feecf124-a76c-41a0-9798-d84c70060a68 req-0fa54ac8-c070-413d-ad7a-a0b209018775 service nova] Acquiring lock "refresh_cache-510db409-0b4c-494a-8084-39ef3cd6c918" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1669.874488] env[63379]: DEBUG oslo_concurrency.lockutils [req-feecf124-a76c-41a0-9798-d84c70060a68 req-0fa54ac8-c070-413d-ad7a-a0b209018775 service nova] Acquired lock "refresh_cache-510db409-0b4c-494a-8084-39ef3cd6c918" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1669.874790] env[63379]: DEBUG nova.network.neutron [req-feecf124-a76c-41a0-9798-d84c70060a68 req-0fa54ac8-c070-413d-ad7a-a0b209018775 service nova] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Refreshing network info cache for port 6cdabd2b-f665-46a9-a86e-2527cfe452bf {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1669.885949] env[63379]: DEBUG nova.network.neutron [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Successfully created port: 7a48c3ef-9850-43b6-b138-d7cbb329face {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1669.985231] env[63379]: DEBUG nova.compute.manager [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Start building block device mappings for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1670.047836] env[63379]: DEBUG oslo_vmware.api [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779731, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07188} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1670.049658] env[63379]: DEBUG nova.network.neutron [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] [instance: 266cc3d5-c10d-4367-a879-d170802495db] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1670.053202] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1670.057928] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17d762fd-b717-4c35-a5f7-6c6d863ee1e8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.095524] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] c1858f41-75e7-4eee-a6db-493e150622ef/c1858f41-75e7-4eee-a6db-493e150622ef.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1670.102324] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-65230699-5b6f-45b2-9388-9c84a27f74d8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.136512] env[63379]: DEBUG oslo_vmware.api [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1670.136512] env[63379]: value = "task-1779732" [ 1670.136512] env[63379]: _type = "Task" [ 1670.136512] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1670.152885] env[63379]: DEBUG oslo_vmware.api [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779732, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.204268] env[63379]: DEBUG oslo_vmware.api [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52126eca-c305-a6b3-6794-608ad58ba3cc, 'name': SearchDatastore_Task, 'duration_secs': 0.044634} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1670.204793] env[63379]: DEBUG oslo_concurrency.lockutils [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1670.204793] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1670.204912] env[63379]: DEBUG oslo_concurrency.lockutils [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1670.205076] env[63379]: DEBUG oslo_concurrency.lockutils [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1670.205919] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1670.205919] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-93df80c6-ee89-4703-b2c5-d381f44e9647 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.220604] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1670.220800] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1670.221726] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83cdc0b8-f036-45b1-9f99-76edf91db4e3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.230706] env[63379]: DEBUG oslo_vmware.api [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1670.230706] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]528741a3-cc3d-7f23-973a-db8551e8830d" [ 1670.230706] env[63379]: _type = "Task" [ 1670.230706] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1670.238842] env[63379]: DEBUG oslo_vmware.api [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]528741a3-cc3d-7f23-973a-db8551e8830d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.309150] env[63379]: DEBUG nova.network.neutron [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] [instance: 266cc3d5-c10d-4367-a879-d170802495db] Updating instance_info_cache with network_info: [{"id": "96c2a882-fdd3-4e25-92d0-a68dd0bcb811", "address": "fa:16:3e:e0:2e:96", "network": {"id": "8abcb75b-485b-4382-bd64-575187b0ac82", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1541387469-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16d0a53eb7ad4b349a5eab251e059dfa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0e00b2f1-c70f-4b21-86eb-810643cc1680", "external-id": "nsx-vlan-transportzone-487", "segmentation_id": 487, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96c2a882-fd", "ovs_interfaceid": "96c2a882-fdd3-4e25-92d0-a68dd0bcb811", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1670.483435] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f852ca27-049a-476e-b6f1-e3989ecb4f52 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.494882] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d002cd3-8a18-4592-8be1-e8730e02d075 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.534607] env[63379]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3bde445-50b1-4b55-9220-fc5abd80c962 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.543810] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3c0e5d2-60a5-4d96-8288-f374a6da2560 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.560467] env[63379]: DEBUG nova.compute.provider_tree [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1670.648507] env[63379]: DEBUG oslo_vmware.api [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779732, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.652854] env[63379]: DEBUG nova.network.neutron [req-feecf124-a76c-41a0-9798-d84c70060a68 req-0fa54ac8-c070-413d-ad7a-a0b209018775 service nova] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Updated VIF entry in instance network info cache for port 6cdabd2b-f665-46a9-a86e-2527cfe452bf. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1670.652854] env[63379]: DEBUG nova.network.neutron [req-feecf124-a76c-41a0-9798-d84c70060a68 req-0fa54ac8-c070-413d-ad7a-a0b209018775 service nova] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Updating instance_info_cache with network_info: [{"id": "6cdabd2b-f665-46a9-a86e-2527cfe452bf", "address": "fa:16:3e:bc:a5:55", "network": {"id": "867cf8d8-4bba-4306-ad6d-632c9dc6863d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-777715300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.247", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a3363a90de2d4d5988ddd03974c10d0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "459b8c74-0aa6-42b6-996a-42b1c5d7e5c6", "external-id": "nsx-vlan-transportzone-467", "segmentation_id": 467, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6cdabd2b-f6", "ovs_interfaceid": "6cdabd2b-f665-46a9-a86e-2527cfe452bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1670.742197] env[63379]: DEBUG oslo_vmware.api [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]528741a3-cc3d-7f23-973a-db8551e8830d, 'name': SearchDatastore_Task, 
'duration_secs': 0.073745} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1670.743030] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf8da704-8dbd-499b-ab98-06c82c20f298 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.749097] env[63379]: DEBUG oslo_vmware.api [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1670.749097] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52354e75-e168-67eb-5b08-cc86f56faec0" [ 1670.749097] env[63379]: _type = "Task" [ 1670.749097] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1670.757089] env[63379]: DEBUG oslo_vmware.api [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52354e75-e168-67eb-5b08-cc86f56faec0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.814945] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Releasing lock "refresh_cache-266cc3d5-c10d-4367-a879-d170802495db" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1670.814945] env[63379]: DEBUG nova.compute.manager [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] [instance: 266cc3d5-c10d-4367-a879-d170802495db] Instance network_info: |[{"id": "96c2a882-fdd3-4e25-92d0-a68dd0bcb811", "address": "fa:16:3e:e0:2e:96", "network": {"id": "8abcb75b-485b-4382-bd64-575187b0ac82", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1541387469-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16d0a53eb7ad4b349a5eab251e059dfa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0e00b2f1-c70f-4b21-86eb-810643cc1680", "external-id": "nsx-vlan-transportzone-487", "segmentation_id": 487, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96c2a882-fd", "ovs_interfaceid": "96c2a882-fdd3-4e25-92d0-a68dd0bcb811", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1670.817017] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] [instance: 
266cc3d5-c10d-4367-a879-d170802495db] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e0:2e:96', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0e00b2f1-c70f-4b21-86eb-810643cc1680', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '96c2a882-fdd3-4e25-92d0-a68dd0bcb811', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1670.823290] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Creating folder: Project (16d0a53eb7ad4b349a5eab251e059dfa). Parent ref: group-v369214. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1670.823611] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d8b35191-dd82-4180-818d-8ecb15d49682 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.835937] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Created folder: Project (16d0a53eb7ad4b349a5eab251e059dfa) in parent group-v369214. [ 1670.836236] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Creating folder: Instances. Parent ref: group-v369417. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1670.836496] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-835efd32-5c41-46d4-a2ab-833c44d2de74 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.846932] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Created folder: Instances in parent group-v369417. [ 1670.847203] env[63379]: DEBUG oslo.service.loopingcall [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1670.847442] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 266cc3d5-c10d-4367-a879-d170802495db] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1670.847667] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2d2aded5-c9ce-4585-b81b-a892342296d0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.869882] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1670.869882] env[63379]: value = "task-1779735" [ 1670.869882] env[63379]: _type = "Task" [ 1670.869882] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1670.878372] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779735, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.900181] env[63379]: DEBUG nova.compute.manager [req-afa9b7db-1a3c-4e29-b461-a3a800768177 req-43b1598e-04cc-49f0-b47a-61a1569f654a service nova] [instance: 266cc3d5-c10d-4367-a879-d170802495db] Received event network-changed-96c2a882-fdd3-4e25-92d0-a68dd0bcb811 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1670.900518] env[63379]: DEBUG nova.compute.manager [req-afa9b7db-1a3c-4e29-b461-a3a800768177 req-43b1598e-04cc-49f0-b47a-61a1569f654a service nova] [instance: 266cc3d5-c10d-4367-a879-d170802495db] Refreshing instance network info cache due to event network-changed-96c2a882-fdd3-4e25-92d0-a68dd0bcb811. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1670.900690] env[63379]: DEBUG oslo_concurrency.lockutils [req-afa9b7db-1a3c-4e29-b461-a3a800768177 req-43b1598e-04cc-49f0-b47a-61a1569f654a service nova] Acquiring lock "refresh_cache-266cc3d5-c10d-4367-a879-d170802495db" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1670.900840] env[63379]: DEBUG oslo_concurrency.lockutils [req-afa9b7db-1a3c-4e29-b461-a3a800768177 req-43b1598e-04cc-49f0-b47a-61a1569f654a service nova] Acquired lock "refresh_cache-266cc3d5-c10d-4367-a879-d170802495db" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1670.901049] env[63379]: DEBUG nova.network.neutron [req-afa9b7db-1a3c-4e29-b461-a3a800768177 req-43b1598e-04cc-49f0-b47a-61a1569f654a service nova] [instance: 266cc3d5-c10d-4367-a879-d170802495db] Refreshing network info cache for port 96c2a882-fdd3-4e25-92d0-a68dd0bcb811 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1671.002420] env[63379]: DEBUG nova.compute.manager [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1671.035471] env[63379]: DEBUG nova.virt.hardware [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1671.035730] env[63379]: DEBUG nova.virt.hardware [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1671.035892] env[63379]: DEBUG nova.virt.hardware [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1671.036177] env[63379]: DEBUG nova.virt.hardware [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1671.036380] env[63379]: DEBUG nova.virt.hardware [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1671.036489] env[63379]: DEBUG nova.virt.hardware [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1671.036704] env[63379]: DEBUG nova.virt.hardware [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1671.036869] env[63379]: DEBUG nova.virt.hardware [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1671.037124] env[63379]: DEBUG nova.virt.hardware [None 
req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1671.037337] env[63379]: DEBUG nova.virt.hardware [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1671.037518] env[63379]: DEBUG nova.virt.hardware [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1671.038399] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cfa502b-1379-4a28-94a5-cd88e8ffc441 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.046803] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcfb7d23-974a-47ee-aadd-0fe072ff6840 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.063744] env[63379]: DEBUG nova.scheduler.client.report [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1671.155647] env[63379]: DEBUG oslo_vmware.api [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779732, 'name': ReconfigVM_Task, 'duration_secs': 0.534122} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1671.155647] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Reconfigured VM instance instance-00000046 to attach disk [datastore1] c1858f41-75e7-4eee-a6db-493e150622ef/c1858f41-75e7-4eee-a6db-493e150622ef.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1671.155647] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a265c774-5c31-4f49-9f87-9a219834f1da {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.155647] env[63379]: DEBUG oslo_concurrency.lockutils [req-feecf124-a76c-41a0-9798-d84c70060a68 req-0fa54ac8-c070-413d-ad7a-a0b209018775 service nova] Releasing lock "refresh_cache-510db409-0b4c-494a-8084-39ef3cd6c918" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1671.162468] env[63379]: DEBUG oslo_vmware.api [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1671.162468] env[63379]: value = "task-1779736" [ 1671.162468] env[63379]: _type = "Task" [ 1671.162468] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1671.172271] env[63379]: DEBUG oslo_vmware.api [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779736, 'name': Rename_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.259785] env[63379]: DEBUG oslo_vmware.api [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52354e75-e168-67eb-5b08-cc86f56faec0, 'name': SearchDatastore_Task, 'duration_secs': 0.010921} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1671.260049] env[63379]: DEBUG oslo_concurrency.lockutils [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1671.260328] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f/fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1671.260594] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-54520a2f-4fcb-4217-bd54-34774e3213a2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.268677] env[63379]: DEBUG oslo_vmware.api [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1671.268677] env[63379]: value = "task-1779737" [ 1671.268677] env[63379]: _type = "Task" [ 1671.268677] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1671.280625] env[63379]: DEBUG oslo_vmware.api [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779737, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.384170] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779735, 'name': CreateVM_Task, 'duration_secs': 0.459705} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1671.384365] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 266cc3d5-c10d-4367-a879-d170802495db] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1671.385352] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1671.385352] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1671.385656] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1671.385932] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28da5868-c47b-43df-b4c2-3ce667789618 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.391675] env[63379]: DEBUG oslo_vmware.api [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Waiting for the task: (returnval){ [ 1671.391675] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a48f79-6c42-6878-a648-198c304cdda6" [ 1671.391675] env[63379]: _type = "Task" [ 1671.391675] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1671.406591] env[63379]: DEBUG oslo_vmware.api [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a48f79-6c42-6878-a648-198c304cdda6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.570289] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.601s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1671.570882] env[63379]: DEBUG nova.compute.manager [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1671.574679] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 29.353s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1671.672832] env[63379]: DEBUG oslo_vmware.api [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779736, 'name': Rename_Task, 'duration_secs': 0.18707} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1671.675940] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1671.675940] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-33c17b6d-c04c-45b3-8856-412c0876694a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.690399] env[63379]: DEBUG oslo_vmware.api [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1671.690399] env[63379]: value = "task-1779738" [ 1671.690399] env[63379]: _type = "Task" [ 1671.690399] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1671.702577] env[63379]: DEBUG oslo_vmware.api [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779738, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.703759] env[63379]: DEBUG nova.network.neutron [req-afa9b7db-1a3c-4e29-b461-a3a800768177 req-43b1598e-04cc-49f0-b47a-61a1569f654a service nova] [instance: 266cc3d5-c10d-4367-a879-d170802495db] Updated VIF entry in instance network info cache for port 96c2a882-fdd3-4e25-92d0-a68dd0bcb811. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1671.704291] env[63379]: DEBUG nova.network.neutron [req-afa9b7db-1a3c-4e29-b461-a3a800768177 req-43b1598e-04cc-49f0-b47a-61a1569f654a service nova] [instance: 266cc3d5-c10d-4367-a879-d170802495db] Updating instance_info_cache with network_info: [{"id": "96c2a882-fdd3-4e25-92d0-a68dd0bcb811", "address": "fa:16:3e:e0:2e:96", "network": {"id": "8abcb75b-485b-4382-bd64-575187b0ac82", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1541387469-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "16d0a53eb7ad4b349a5eab251e059dfa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0e00b2f1-c70f-4b21-86eb-810643cc1680", "external-id": "nsx-vlan-transportzone-487", "segmentation_id": 487, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96c2a882-fd", "ovs_interfaceid": "96c2a882-fdd3-4e25-92d0-a68dd0bcb811", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1671.779773] env[63379]: DEBUG oslo_vmware.api [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779737, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.817161] env[63379]: DEBUG nova.network.neutron [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Successfully updated port: 7a48c3ef-9850-43b6-b138-d7cbb329face {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1671.902723] env[63379]: DEBUG oslo_vmware.api [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a48f79-6c42-6878-a648-198c304cdda6, 'name': SearchDatastore_Task, 'duration_secs': 0.00963} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1671.903062] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1671.903310] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] [instance: 266cc3d5-c10d-4367-a879-d170802495db] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1671.903553] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1671.903706] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1671.903894] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1671.904197] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e3392841-11ac-4d8b-aae7-97b6fc2097e8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.912968] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1671.913175] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1671.913940] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e3212dc-dcdd-4091-92e8-bb6e20359208 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.919655] env[63379]: DEBUG oslo_vmware.api [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Waiting for the task: (returnval){ [ 1671.919655] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e6e7ba-0d49-3a98-2b14-55793933e542" [ 1671.919655] env[63379]: _type = "Task" [ 1671.919655] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1671.928376] env[63379]: DEBUG oslo_vmware.api [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e6e7ba-0d49-3a98-2b14-55793933e542, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.078747] env[63379]: DEBUG nova.compute.utils [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1672.080211] env[63379]: DEBUG nova.compute.manager [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1672.080422] env[63379]: DEBUG nova.network.neutron [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1672.141617] env[63379]: DEBUG nova.policy [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a52cb7db81d24a8faddcb40308665627', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '50144e7fcb0642d7a1d1514f2233f555', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1672.199667] env[63379]: DEBUG oslo_vmware.api [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779738, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.207334] env[63379]: DEBUG oslo_concurrency.lockutils [req-afa9b7db-1a3c-4e29-b461-a3a800768177 req-43b1598e-04cc-49f0-b47a-61a1569f654a service nova] Releasing lock "refresh_cache-266cc3d5-c10d-4367-a879-d170802495db" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1672.279728] env[63379]: DEBUG oslo_vmware.api [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779737, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.543419} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1672.280045] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f/fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1672.280248] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1672.280510] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-76bf4f15-e05f-4f14-8cc8-d316aec9b48c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.290347] env[63379]: DEBUG oslo_vmware.api [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1672.290347] env[63379]: value = "task-1779739" [ 1672.290347] env[63379]: _type = "Task" [ 1672.290347] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1672.298536] env[63379]: DEBUG oslo_vmware.api [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779739, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.325020] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Acquiring lock "refresh_cache-da66c3d9-ca03-4113-8703-64b666628936" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1672.325020] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Acquired lock "refresh_cache-da66c3d9-ca03-4113-8703-64b666628936" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1672.325020] env[63379]: DEBUG nova.network.neutron [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1672.430610] env[63379]: DEBUG oslo_vmware.api [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e6e7ba-0d49-3a98-2b14-55793933e542, 'name': SearchDatastore_Task, 'duration_secs': 0.009507} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1672.431345] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed2379ea-3219-412a-a3e2-2b863683c533 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.436654] env[63379]: DEBUG oslo_vmware.api [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Waiting for the task: (returnval){ [ 1672.436654] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d25353-5775-6bdc-8e39-16766ffffc22" [ 1672.436654] env[63379]: _type = "Task" [ 1672.436654] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1672.444076] env[63379]: DEBUG oslo_vmware.api [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d25353-5775-6bdc-8e39-16766ffffc22, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.577016] env[63379]: DEBUG nova.network.neutron [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Successfully created port: 9b6c5265-06a3-4c47-b5cf-a656af402fa9 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1672.593261] env[63379]: DEBUG nova.compute.manager [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1672.622281] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 41952d7b-ce23-4e9b-8843-bbac1d3099c1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1672.622466] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1672.622691] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 90f0c97d-695b-4975-8ab9-4e77a9175da1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1672.623462] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 758ade2c-7f75-4907-95d5-681d5792ae31 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1672.623462] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 0324da80-b97c-4dc9-9083-199fbda60341 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1672.623462] env[63379]: WARNING nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance f10fe64d-a09e-488a-b609-3e38922cf2e0 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 1672.623462] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 1d2de9da-9dfe-42d2-b206-bb5139b1970b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1672.623462] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1672.623462] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 1d76a28f-822d-4b4f-be2f-2ad3371b3979 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1672.623689] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance f983d089-7cfc-46a5-8f8d-f49f67aef1da actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1672.623689] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 38be0e8d-188b-4a98-aedc-5d941b63c000 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1672.623751] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance ec1f7a44-7344-43fb-9d51-688731d8ce14 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1672.623862] env[63379]: WARNING nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance f082cdd7-228e-4100-b301-5af6daea9b36 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1672.623991] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 19a41941-0679-4971-8a44-c95b13f5c294 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1672.627280] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 5c4ae6c6-538a-4724-ad77-340d9c60c24a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1672.627456] env[63379]: WARNING nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance acc8aa2f-41a8-4f06-8227-a1bae9c93f44 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1672.627619] env[63379]: WARNING nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance b91a5b89-0456-431d-b099-adda3a6b3024 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1672.627755] env[63379]: WARNING nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 5aad86f8-0b3b-43ca-982b-c670e3411c01 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1672.627882] env[63379]: WARNING nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1672.627999] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance f087b3ac-13e2-4e55-a3ce-5e6bd3379239 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1672.628130] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 510db409-0b4c-494a-8084-39ef3cd6c918 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1672.628243] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance c1858f41-75e7-4eee-a6db-493e150622ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1672.628356] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1672.628464] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 266cc3d5-c10d-4367-a879-d170802495db actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1672.628572] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance da66c3d9-ca03-4113-8703-64b666628936 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1672.628680] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1672.702233] env[63379]: DEBUG oslo_vmware.api [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779738, 'name': PowerOnVM_Task, 'duration_secs': 0.733893} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1672.702233] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1672.702456] env[63379]: INFO nova.compute.manager [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Took 9.43 seconds to spawn the instance on the hypervisor. [ 1672.702654] env[63379]: DEBUG nova.compute.manager [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1672.703457] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a533dfe-c189-45f3-8015-e1dca399b410 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.800747] env[63379]: DEBUG oslo_vmware.api [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779739, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085108} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1672.801015] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1672.801797] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-813147b7-90fe-4d99-94f9-83d0594fada1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.825426] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f/fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1672.827810] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a8f095e9-fb1d-4445-94ab-d68153098209 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.847958] env[63379]: DEBUG oslo_vmware.api [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1672.847958] env[63379]: value = "task-1779740" [ 1672.847958] env[63379]: _type = "Task" [ 1672.847958] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1672.856141] env[63379]: DEBUG oslo_vmware.api [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779740, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.893656] env[63379]: DEBUG nova.network.neutron [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1672.952533] env[63379]: DEBUG oslo_vmware.api [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d25353-5775-6bdc-8e39-16766ffffc22, 'name': SearchDatastore_Task, 'duration_secs': 0.030948} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1672.953061] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1672.953446] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 266cc3d5-c10d-4367-a879-d170802495db/266cc3d5-c10d-4367-a879-d170802495db.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1672.953795] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-273b210f-bd51-4f64-94a3-814252c9e541 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.961868] env[63379]: DEBUG oslo_vmware.api [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Waiting for the task: (returnval){ [ 1672.961868] env[63379]: value = "task-1779741" [ 1672.961868] env[63379]: _type = "Task" [ 1672.961868] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1672.973671] env[63379]: DEBUG oslo_vmware.api [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Task: {'id': task-1779741, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.065783] env[63379]: DEBUG nova.compute.manager [req-848e8134-5652-4701-bb3c-74779e8b0c4b req-0d1be9fe-a6b3-4d52-bac5-82a8d0114acd service nova] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Received event network-changed-ae27d114-783b-4d6d-89ea-22959da9b86f {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1673.066042] env[63379]: DEBUG nova.compute.manager [req-848e8134-5652-4701-bb3c-74779e8b0c4b req-0d1be9fe-a6b3-4d52-bac5-82a8d0114acd service nova] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Refreshing instance network info cache due to event network-changed-ae27d114-783b-4d6d-89ea-22959da9b86f. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1673.066752] env[63379]: DEBUG oslo_concurrency.lockutils [req-848e8134-5652-4701-bb3c-74779e8b0c4b req-0d1be9fe-a6b3-4d52-bac5-82a8d0114acd service nova] Acquiring lock "refresh_cache-f087b3ac-13e2-4e55-a3ce-5e6bd3379239" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1673.066752] env[63379]: DEBUG oslo_concurrency.lockutils [req-848e8134-5652-4701-bb3c-74779e8b0c4b req-0d1be9fe-a6b3-4d52-bac5-82a8d0114acd service nova] Acquired lock "refresh_cache-f087b3ac-13e2-4e55-a3ce-5e6bd3379239" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1673.066920] env[63379]: DEBUG nova.network.neutron [req-848e8134-5652-4701-bb3c-74779e8b0c4b req-0d1be9fe-a6b3-4d52-bac5-82a8d0114acd service nova] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Refreshing network info cache for port ae27d114-783b-4d6d-89ea-22959da9b86f {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1673.077848] env[63379]: DEBUG nova.network.neutron [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Updating instance_info_cache with network_info: [{"id": "7a48c3ef-9850-43b6-b138-d7cbb329face", "address": "fa:16:3e:d8:c3:c8", "network": {"id": "832e4609-8371-4d4b-8cfc-8a38039d24b7", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1517956996-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "c3562bb229474ba7aa3dae98def05260", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f267bcdd-0daa-4337-9709-5fc060c267d8", "external-id": "nsx-vlan-transportzone-308", "segmentation_id": 308, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a48c3ef-98", "ovs_interfaceid": "7a48c3ef-9850-43b6-b138-d7cbb329face", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1673.131762] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 8b33e64a-ea19-4974-8c2d-350615b1e061 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1673.223562] env[63379]: INFO nova.compute.manager [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Took 41.47 seconds to build instance. 
[ 1673.365458] env[63379]: DEBUG oslo_vmware.api [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779740, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.475233] env[63379]: DEBUG oslo_vmware.api [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Task: {'id': task-1779741, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.586158] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Releasing lock "refresh_cache-da66c3d9-ca03-4113-8703-64b666628936" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1673.586158] env[63379]: DEBUG nova.compute.manager [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Instance network_info: |[{"id": "7a48c3ef-9850-43b6-b138-d7cbb329face", "address": "fa:16:3e:d8:c3:c8", "network": {"id": "832e4609-8371-4d4b-8cfc-8a38039d24b7", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1517956996-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "c3562bb229474ba7aa3dae98def05260", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f267bcdd-0daa-4337-9709-5fc060c267d8", "external-id": "nsx-vlan-transportzone-308", "segmentation_id": 308, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a48c3ef-98", "ovs_interfaceid": "7a48c3ef-9850-43b6-b138-d7cbb329face", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1673.586158] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d8:c3:c8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f267bcdd-0daa-4337-9709-5fc060c267d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7a48c3ef-9850-43b6-b138-d7cbb329face', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1673.594027] env[63379]: DEBUG oslo.service.loopingcall [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1673.594684] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da66c3d9-ca03-4113-8703-64b666628936] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1673.595034] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f7c33405-dde4-4a71-8fb8-776dd7117515 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.618581] env[63379]: DEBUG nova.compute.manager [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1673.627605] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1673.627605] env[63379]: value = "task-1779742" [ 1673.627605] env[63379]: _type = "Task" [ 1673.627605] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.642287] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance a7cce485-7476-4ea1-b127-68d879e164cd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1673.646414] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779742, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.658620] env[63379]: DEBUG nova.virt.hardware [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1673.658882] env[63379]: DEBUG nova.virt.hardware [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1673.659716] env[63379]: DEBUG nova.virt.hardware [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1673.660096] env[63379]: DEBUG nova.virt.hardware [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1673.660291] env[63379]: DEBUG nova.virt.hardware [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1673.660490] env[63379]: DEBUG nova.virt.hardware [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1673.660788] env[63379]: DEBUG nova.virt.hardware [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1673.661097] env[63379]: DEBUG nova.virt.hardware [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Build 
topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1673.661263] env[63379]: DEBUG nova.virt.hardware [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1673.661436] env[63379]: DEBUG nova.virt.hardware [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1673.661613] env[63379]: DEBUG nova.virt.hardware [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1673.662753] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f2f9065-580f-49ea-82e7-77878020f385 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.672705] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65748775-c902-4dc2-be21-a746bc7a2e37 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.725633] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e99cea5e-ed35-4e18-9528-02f301f5c8c8 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "c1858f41-75e7-4eee-a6db-493e150622ef" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.980s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1673.861748] env[63379]: DEBUG oslo_vmware.api [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779740, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.900940] env[63379]: DEBUG nova.network.neutron [req-848e8134-5652-4701-bb3c-74779e8b0c4b req-0d1be9fe-a6b3-4d52-bac5-82a8d0114acd service nova] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Updated VIF entry in instance network info cache for port ae27d114-783b-4d6d-89ea-22959da9b86f. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1673.901373] env[63379]: DEBUG nova.network.neutron [req-848e8134-5652-4701-bb3c-74779e8b0c4b req-0d1be9fe-a6b3-4d52-bac5-82a8d0114acd service nova] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Updating instance_info_cache with network_info: [{"id": "ae27d114-783b-4d6d-89ea-22959da9b86f", "address": "fa:16:3e:2a:2d:02", "network": {"id": "f746cc0e-3c0e-4c9c-b2fc-2e87ec1838e1", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1847805430-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "b124411aac0544d6834ff8f5c2b84bd5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "748a5204-8f14-402c-9a6e-f3e6104db082", "external-id": "nsx-vlan-transportzone-750", "segmentation_id": 750, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae27d114-78", "ovs_interfaceid": "ae27d114-783b-4d6d-89ea-22959da9b86f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1673.980415] env[63379]: DEBUG oslo_vmware.api [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Task: {'id': task-1779741, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.138121] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779742, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.149851] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 861cda26-f938-4b2e-ba3d-56b8469b6034 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1674.150175] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Total usable vcpus: 48, total allocated vcpus: 20 {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1674.150399] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4416MB phys_disk=200GB used_disk=21GB total_vcpus=48 used_vcpus=20 pci_stats=[] {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1674.366172] env[63379]: DEBUG oslo_vmware.api [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779740, 'name': ReconfigVM_Task, 'duration_secs': 1.070144} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1674.366172] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Reconfigured VM instance instance-00000047 to attach disk [datastore1] fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f/fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1674.367460] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-76103ddc-119e-45eb-bfcf-d7bcaeb0f85d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.376020] env[63379]: DEBUG oslo_vmware.api [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1674.376020] env[63379]: value = "task-1779743" [ 1674.376020] env[63379]: _type = "Task" [ 1674.376020] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1674.385707] env[63379]: DEBUG oslo_vmware.api [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779743, 'name': Rename_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.405207] env[63379]: DEBUG oslo_concurrency.lockutils [req-848e8134-5652-4701-bb3c-74779e8b0c4b req-0d1be9fe-a6b3-4d52-bac5-82a8d0114acd service nova] Releasing lock "refresh_cache-f087b3ac-13e2-4e55-a3ce-5e6bd3379239" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1674.405207] env[63379]: DEBUG nova.compute.manager [req-848e8134-5652-4701-bb3c-74779e8b0c4b req-0d1be9fe-a6b3-4d52-bac5-82a8d0114acd service nova] [instance: da66c3d9-ca03-4113-8703-64b666628936] Received event network-vif-plugged-7a48c3ef-9850-43b6-b138-d7cbb329face {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1674.405812] env[63379]: DEBUG oslo_concurrency.lockutils [req-848e8134-5652-4701-bb3c-74779e8b0c4b req-0d1be9fe-a6b3-4d52-bac5-82a8d0114acd service nova] Acquiring lock "da66c3d9-ca03-4113-8703-64b666628936-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1674.406230] env[63379]: DEBUG oslo_concurrency.lockutils [req-848e8134-5652-4701-bb3c-74779e8b0c4b req-0d1be9fe-a6b3-4d52-bac5-82a8d0114acd service nova] Lock "da66c3d9-ca03-4113-8703-64b666628936-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1674.406570] env[63379]: DEBUG oslo_concurrency.lockutils [req-848e8134-5652-4701-bb3c-74779e8b0c4b req-0d1be9fe-a6b3-4d52-bac5-82a8d0114acd service nova] Lock "da66c3d9-ca03-4113-8703-64b666628936-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1674.408128] env[63379]: DEBUG nova.compute.manager [req-848e8134-5652-4701-bb3c-74779e8b0c4b req-0d1be9fe-a6b3-4d52-bac5-82a8d0114acd service nova] [instance: da66c3d9-ca03-4113-8703-64b666628936] No waiting events found dispatching network-vif-plugged-7a48c3ef-9850-43b6-b138-d7cbb329face {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1674.408128] env[63379]: WARNING nova.compute.manager [req-848e8134-5652-4701-bb3c-74779e8b0c4b req-0d1be9fe-a6b3-4d52-bac5-82a8d0114acd service nova] [instance: da66c3d9-ca03-4113-8703-64b666628936] Received unexpected event network-vif-plugged-7a48c3ef-9850-43b6-b138-d7cbb329face for instance with vm_state building and task_state spawning. [ 1674.408128] env[63379]: DEBUG nova.compute.manager [req-848e8134-5652-4701-bb3c-74779e8b0c4b req-0d1be9fe-a6b3-4d52-bac5-82a8d0114acd service nova] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Received event network-changed-ae27d114-783b-4d6d-89ea-22959da9b86f {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1674.408128] env[63379]: DEBUG nova.compute.manager [req-848e8134-5652-4701-bb3c-74779e8b0c4b req-0d1be9fe-a6b3-4d52-bac5-82a8d0114acd service nova] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Refreshing instance network info cache due to event network-changed-ae27d114-783b-4d6d-89ea-22959da9b86f. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1674.408128] env[63379]: DEBUG oslo_concurrency.lockutils [req-848e8134-5652-4701-bb3c-74779e8b0c4b req-0d1be9fe-a6b3-4d52-bac5-82a8d0114acd service nova] Acquiring lock "refresh_cache-f087b3ac-13e2-4e55-a3ce-5e6bd3379239" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1674.408128] env[63379]: DEBUG oslo_concurrency.lockutils [req-848e8134-5652-4701-bb3c-74779e8b0c4b req-0d1be9fe-a6b3-4d52-bac5-82a8d0114acd service nova] Acquired lock "refresh_cache-f087b3ac-13e2-4e55-a3ce-5e6bd3379239" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1674.408128] env[63379]: DEBUG nova.network.neutron [req-848e8134-5652-4701-bb3c-74779e8b0c4b req-0d1be9fe-a6b3-4d52-bac5-82a8d0114acd service nova] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Refreshing network info cache for port ae27d114-783b-4d6d-89ea-22959da9b86f {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1674.486531] env[63379]: DEBUG oslo_vmware.api [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Task: {'id': task-1779741, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.051671} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1674.486531] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 266cc3d5-c10d-4367-a879-d170802495db/266cc3d5-c10d-4367-a879-d170802495db.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1674.486896] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] [instance: 266cc3d5-c10d-4367-a879-d170802495db] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1674.487057] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c1bb98b6-3902-4a0c-bf56-4ca30078f5ee {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.496238] env[63379]: DEBUG oslo_vmware.api [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Waiting for the task: (returnval){ [ 1674.496238] env[63379]: value = "task-1779744" [ 1674.496238] env[63379]: _type = "Task" [ 1674.496238] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1674.505865] env[63379]: DEBUG oslo_vmware.api [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Task: {'id': task-1779744, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.571501] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6208d1e-75df-4aa7-9438-b3de899ef47e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.579909] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6897f877-0c6a-4be9-9d34-fa7962cd9b95 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.613409] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acf42f91-0d66-4586-82be-97aa97ab33fd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.626961] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f679c469-52f6-40a9-8f57-9c045f6339fc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.641787] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779742, 'name': CreateVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.650485] env[63379]: DEBUG nova.compute.provider_tree [None req-76609179-3ebc-4316-8203-21a64671102b None None] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1674.711064] env[63379]: DEBUG nova.network.neutron [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Successfully updated port: 9b6c5265-06a3-4c47-b5cf-a656af402fa9 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1674.761565] env[63379]: INFO nova.compute.manager [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Rebuilding instance [ 1674.801730] env[63379]: DEBUG nova.compute.manager [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1674.802618] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eb06a0f-bb4c-437c-92a8-59196ad515b4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.884408] env[63379]: DEBUG oslo_vmware.api [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 
tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779743, 'name': Rename_Task, 'duration_secs': 0.268027} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1674.884693] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1674.884966] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3168ef48-1a1b-4359-af73-69c4eef8782d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.892060] env[63379]: DEBUG oslo_vmware.api [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1674.892060] env[63379]: value = "task-1779745" [ 1674.892060] env[63379]: _type = "Task" [ 1674.892060] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1674.900594] env[63379]: DEBUG oslo_vmware.api [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779745, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.007902] env[63379]: DEBUG oslo_vmware.api [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Task: {'id': task-1779744, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.20723} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.010506] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] [instance: 266cc3d5-c10d-4367-a879-d170802495db] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1675.011869] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5efe9ff-9140-4997-bc1a-3a8d2b5a2639 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.034604] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] [instance: 266cc3d5-c10d-4367-a879-d170802495db] Reconfiguring VM instance instance-00000048 to attach disk [datastore1] 266cc3d5-c10d-4367-a879-d170802495db/266cc3d5-c10d-4367-a879-d170802495db.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1675.038375] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-45e57a71-6d0a-41ba-9629-10342c8c4a70 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.062531] env[63379]: DEBUG oslo_vmware.api [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Waiting for the task: (returnval){ [ 1675.062531] env[63379]: value = "task-1779746" [ 1675.062531] env[63379]: _type = "Task" [ 1675.062531] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.072412] env[63379]: DEBUG oslo_vmware.api [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Task: {'id': task-1779746, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.144755] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779742, 'name': CreateVM_Task, 'duration_secs': 1.042131} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.144913] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da66c3d9-ca03-4113-8703-64b666628936] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1675.145785] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1675.145785] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1675.147039] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1675.147452] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c21dad57-ae87-455e-baa1-e61e6be6255f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.158900] env[63379]: DEBUG oslo_vmware.api [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Waiting for the task: (returnval){ [ 1675.158900] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52ac2e17-73e1-ae20-eb09-36b54a0a3c7b" [ 1675.158900] env[63379]: _type = "Task" [ 1675.158900] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.168101] env[63379]: DEBUG oslo_vmware.api [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52ac2e17-73e1-ae20-eb09-36b54a0a3c7b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.177876] env[63379]: ERROR nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] [req-44a44d57-a172-4623-be5a-245d5765d9bd] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID cf478c89-515f-4372-b90f-4868ab56e978. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-44a44d57-a172-4623-be5a-245d5765d9bd"}]} [ 1675.196146] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Refreshing inventories for resource provider cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1675.209026] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Updating ProviderTree inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1675.209294] env[63379]: DEBUG nova.compute.provider_tree [None req-76609179-3ebc-4316-8203-21a64671102b None None] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1675.221024] env[63379]: DEBUG nova.compute.manager [req-2391257e-ae99-492c-bc35-d744fcbb0297 req-854ef2c4-694d-44ff-8a3a-5d975d42cd60 service nova] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Received event network-changed-ae27d114-783b-4d6d-89ea-22959da9b86f {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1675.221024] env[63379]: DEBUG nova.compute.manager [req-2391257e-ae99-492c-bc35-d744fcbb0297 req-854ef2c4-694d-44ff-8a3a-5d975d42cd60 service nova] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Refreshing instance network info cache due to event network-changed-ae27d114-783b-4d6d-89ea-22959da9b86f. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1675.221024] env[63379]: DEBUG oslo_concurrency.lockutils [req-2391257e-ae99-492c-bc35-d744fcbb0297 req-854ef2c4-694d-44ff-8a3a-5d975d42cd60 service nova] Acquiring lock "refresh_cache-f087b3ac-13e2-4e55-a3ce-5e6bd3379239" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1675.221024] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Acquiring lock "refresh_cache-7687aaa1-d1a0-4d0d-a6b4-47c454fe3655" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1675.221024] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Acquired lock "refresh_cache-7687aaa1-d1a0-4d0d-a6b4-47c454fe3655" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1675.221024] env[63379]: DEBUG nova.network.neutron [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1675.221024] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Refreshing aggregate associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, aggregates: None {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1675.233814] env[63379]: DEBUG nova.network.neutron [req-848e8134-5652-4701-bb3c-74779e8b0c4b req-0d1be9fe-a6b3-4d52-bac5-82a8d0114acd service nova] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Updated VIF entry in instance network info cache for port ae27d114-783b-4d6d-89ea-22959da9b86f. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1675.234284] env[63379]: DEBUG nova.network.neutron [req-848e8134-5652-4701-bb3c-74779e8b0c4b req-0d1be9fe-a6b3-4d52-bac5-82a8d0114acd service nova] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Updating instance_info_cache with network_info: [{"id": "ae27d114-783b-4d6d-89ea-22959da9b86f", "address": "fa:16:3e:2a:2d:02", "network": {"id": "f746cc0e-3c0e-4c9c-b2fc-2e87ec1838e1", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1847805430-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "b124411aac0544d6834ff8f5c2b84bd5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "748a5204-8f14-402c-9a6e-f3e6104db082", "external-id": "nsx-vlan-transportzone-750", "segmentation_id": 750, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae27d114-78", "ovs_interfaceid": "ae27d114-783b-4d6d-89ea-22959da9b86f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1675.242506] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Refreshing trait associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1675.316750] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1675.317408] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8984d90e-0f3d-4153-9efb-023b130b1b3d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.325137] env[63379]: DEBUG oslo_vmware.api [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1675.325137] env[63379]: value = "task-1779747" [ 1675.325137] env[63379]: _type = "Task" [ 1675.325137] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.335821] env[63379]: DEBUG oslo_vmware.api [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779747, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.404491] env[63379]: DEBUG oslo_vmware.api [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779745, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.557138] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0aaea575-d5d0-43f4-8013-919e60aafde7 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Acquiring lock "f087b3ac-13e2-4e55-a3ce-5e6bd3379239" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1675.557443] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0aaea575-d5d0-43f4-8013-919e60aafde7 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Lock "f087b3ac-13e2-4e55-a3ce-5e6bd3379239" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1675.557660] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0aaea575-d5d0-43f4-8013-919e60aafde7 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Acquiring lock "f087b3ac-13e2-4e55-a3ce-5e6bd3379239-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1675.557848] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0aaea575-d5d0-43f4-8013-919e60aafde7 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Lock "f087b3ac-13e2-4e55-a3ce-5e6bd3379239-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1675.558034] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0aaea575-d5d0-43f4-8013-919e60aafde7 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Lock "f087b3ac-13e2-4e55-a3ce-5e6bd3379239-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1675.560039] env[63379]: INFO nova.compute.manager [None req-0aaea575-d5d0-43f4-8013-919e60aafde7 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Terminating instance [ 1675.561963] env[63379]: DEBUG nova.compute.manager [None req-0aaea575-d5d0-43f4-8013-919e60aafde7 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1675.562180] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0aaea575-d5d0-43f4-8013-919e60aafde7 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1675.563042] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97ce55dc-3d33-4b36-aa2e-6af25b3346b9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.577473] env[63379]: DEBUG oslo_vmware.api [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Task: {'id': task-1779746, 'name': ReconfigVM_Task, 'duration_secs': 0.449591} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.578676] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] [instance: 266cc3d5-c10d-4367-a879-d170802495db] Reconfigured VM instance instance-00000048 to attach disk [datastore1] 266cc3d5-c10d-4367-a879-d170802495db/266cc3d5-c10d-4367-a879-d170802495db.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1675.579398] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aaea575-d5d0-43f4-8013-919e60aafde7 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1675.581855] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-218f02c7-ce75-46c7-9fe1-00361a71cded {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.583393] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-305186f0-3b9f-4b05-befb-518e2f519a8e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.590696] env[63379]: DEBUG oslo_vmware.api [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Waiting for the task: (returnval){ [ 1675.590696] env[63379]: value = "task-1779748" [ 1675.590696] env[63379]: _type = "Task" [ 1675.590696] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.591626] env[63379]: DEBUG oslo_vmware.api [None req-0aaea575-d5d0-43f4-8013-919e60aafde7 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Waiting for the task: (returnval){ [ 1675.591626] env[63379]: value = "task-1779749" [ 1675.591626] env[63379]: _type = "Task" [ 1675.591626] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.599035] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef692c2f-bf23-4db6-bbf6-5108657a0887 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.609365] env[63379]: DEBUG oslo_vmware.api [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Task: {'id': task-1779748, 'name': Rename_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.612779] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9031af86-55c0-4d2a-8adf-636ddec8cae8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.616424] env[63379]: DEBUG oslo_vmware.api [None req-0aaea575-d5d0-43f4-8013-919e60aafde7 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Task: {'id': task-1779749, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.654610] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3786a605-a3a5-489b-ba06-23278db1c8bc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.670176] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77c3edb6-2263-4eb0-a264-5f16f7293e2f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.678755] env[63379]: DEBUG oslo_vmware.api [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52ac2e17-73e1-ae20-eb09-36b54a0a3c7b, 'name': SearchDatastore_Task, 'duration_secs': 0.023783} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.679713] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1675.680084] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1675.680372] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1675.680602] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1675.680896] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1675.681288] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e7fb7fc8-bd13-45c7-892a-abf0989c9564 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.696565] env[63379]: DEBUG nova.compute.provider_tree [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1675.706890] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1675.707135] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1675.709095] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2cbae1c1-c104-4960-b033-b765d58718bf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.715538] env[63379]: DEBUG oslo_vmware.api [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Waiting for the task: (returnval){ [ 1675.715538] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]527709a7-b4c0-10e7-fb7b-1bd0bc241c96" [ 1675.715538] env[63379]: _type = "Task" [ 1675.715538] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.723993] env[63379]: DEBUG oslo_vmware.api [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]527709a7-b4c0-10e7-fb7b-1bd0bc241c96, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.736948] env[63379]: DEBUG oslo_concurrency.lockutils [req-848e8134-5652-4701-bb3c-74779e8b0c4b req-0d1be9fe-a6b3-4d52-bac5-82a8d0114acd service nova] Releasing lock "refresh_cache-f087b3ac-13e2-4e55-a3ce-5e6bd3379239" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1675.737280] env[63379]: DEBUG nova.compute.manager [req-848e8134-5652-4701-bb3c-74779e8b0c4b req-0d1be9fe-a6b3-4d52-bac5-82a8d0114acd service nova] [instance: da66c3d9-ca03-4113-8703-64b666628936] Received event network-changed-7a48c3ef-9850-43b6-b138-d7cbb329face {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1675.737428] env[63379]: DEBUG nova.compute.manager [req-848e8134-5652-4701-bb3c-74779e8b0c4b req-0d1be9fe-a6b3-4d52-bac5-82a8d0114acd service nova] [instance: da66c3d9-ca03-4113-8703-64b666628936] Refreshing instance network info cache due to event network-changed-7a48c3ef-9850-43b6-b138-d7cbb329face. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1675.737640] env[63379]: DEBUG oslo_concurrency.lockutils [req-848e8134-5652-4701-bb3c-74779e8b0c4b req-0d1be9fe-a6b3-4d52-bac5-82a8d0114acd service nova] Acquiring lock "refresh_cache-da66c3d9-ca03-4113-8703-64b666628936" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1675.737788] env[63379]: DEBUG oslo_concurrency.lockutils [req-848e8134-5652-4701-bb3c-74779e8b0c4b req-0d1be9fe-a6b3-4d52-bac5-82a8d0114acd service nova] Acquired lock "refresh_cache-da66c3d9-ca03-4113-8703-64b666628936" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1675.737951] env[63379]: DEBUG nova.network.neutron [req-848e8134-5652-4701-bb3c-74779e8b0c4b req-0d1be9fe-a6b3-4d52-bac5-82a8d0114acd service nova] [instance: da66c3d9-ca03-4113-8703-64b666628936] Refreshing network info cache for port 7a48c3ef-9850-43b6-b138-d7cbb329face {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1675.739329] env[63379]: DEBUG oslo_concurrency.lockutils [req-2391257e-ae99-492c-bc35-d744fcbb0297 req-854ef2c4-694d-44ff-8a3a-5d975d42cd60 service nova] Acquired lock "refresh_cache-f087b3ac-13e2-4e55-a3ce-5e6bd3379239" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1675.739512] env[63379]: DEBUG nova.network.neutron [req-2391257e-ae99-492c-bc35-d744fcbb0297 req-854ef2c4-694d-44ff-8a3a-5d975d42cd60 service nova] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Refreshing network info cache for port ae27d114-783b-4d6d-89ea-22959da9b86f {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1675.770650] env[63379]: DEBUG nova.network.neutron [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1675.835877] env[63379]: DEBUG oslo_vmware.api [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779747, 'name': PowerOffVM_Task, 'duration_secs': 0.296823} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.836197] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1675.836521] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1675.837310] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d262e1f9-3423-4d24-afd7-50dd52e111eb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.847385] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1675.847665] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-afccd0ef-1f54-42cb-927f-91be29f4ff4b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.906398] env[63379]: DEBUG oslo_vmware.api [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779745, 'name': PowerOnVM_Task, 'duration_secs': 0.82905} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.906761] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1675.907097] env[63379]: INFO nova.compute.manager [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Took 10.12 seconds to spawn the instance on the hypervisor. 
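The PowerOnVM_Task entries just above follow oslo.vmware's usual invoke-then-poll pattern: the driver asks the shared API session to invoke an asynchronous vSphere `*_Task` method, then blocks in `wait_for_task` while the "progress is N% ... completed successfully" lines are emitted. Below is a minimal sketch of that pattern outside of Nova; the vCenter host, credentials, and the way the VirtualMachine reference is looked up are illustrative placeholders (parameter names are as I recall them from oslo.vmware), not values taken from this log.

```python
# Minimal sketch of the oslo.vmware invoke/poll pattern recorded in the log above.
# Host, credentials, and the VM lookup are illustrative placeholders.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

session = vmware_api.VMwareAPISession(
    'vc.example.test', 'admin', 'secret',        # placeholder vCenter and credentials
    api_retry_count=10, task_poll_interval=0.5)  # poll interval paces the "progress is N%" lines

# Fetch some VirtualMachine managed-object reference via the property collector;
# the RetrievePropertiesEx invocations in the log come from helpers like this.
retrieve_result = session.invoke_api(
    vim_util, 'get_objects', session.vim, 'VirtualMachine', 1, ['name'])
vm_ref = retrieve_result.objects[0].obj

# Invoke the asynchronous vSphere method; it returns a Task reference immediately.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

# wait_for_task polls until the task succeeds (or raises on error), producing the
# "Task: {'id': ..., 'name': 'PowerOnVM_Task'} progress is ..." entries seen here.
session.wait_for_task(task)
```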
[ 1675.907319] env[63379]: DEBUG nova.compute.manager [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1675.908105] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9147a410-fe53-4288-ba16-56f60d2c1161 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.948961] env[63379]: DEBUG nova.network.neutron [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Updating instance_info_cache with network_info: [{"id": "9b6c5265-06a3-4c47-b5cf-a656af402fa9", "address": "fa:16:3e:fe:de:da", "network": {"id": "393f09d9-160a-48f1-acdf-cf2e43117ed7", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-834514240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "50144e7fcb0642d7a1d1514f2233f555", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b6c5265-06", "ovs_interfaceid": "9b6c5265-06a3-4c47-b5cf-a656af402fa9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1676.103746] env[63379]: DEBUG oslo_vmware.api [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Task: {'id': task-1779748, 'name': Rename_Task, 'duration_secs': 0.163741} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1676.106822] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] [instance: 266cc3d5-c10d-4367-a879-d170802495db] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1676.107176] env[63379]: DEBUG oslo_vmware.api [None req-0aaea575-d5d0-43f4-8013-919e60aafde7 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Task: {'id': task-1779749, 'name': PowerOffVM_Task, 'duration_secs': 0.325698} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1676.107395] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-49ef0de1-7930-493a-a927-72ae687b9762 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.108852] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aaea575-d5d0-43f4-8013-919e60aafde7 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1676.109048] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0aaea575-d5d0-43f4-8013-919e60aafde7 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1676.109285] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-993b5b60-4497-4efd-a70c-032720ccfa7e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.116838] env[63379]: DEBUG oslo_vmware.api [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Waiting for the task: (returnval){ [ 1676.116838] env[63379]: value = "task-1779751" [ 1676.116838] env[63379]: _type = "Task" [ 1676.116838] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1676.125119] env[63379]: DEBUG oslo_vmware.api [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Task: {'id': task-1779751, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.201025] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1676.224894] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1676.226538] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1676.226538] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Deleting the datastore file [datastore1] c1858f41-75e7-4eee-a6db-493e150622ef {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1676.231025] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-da2c6050-02d7-49ec-90a0-6e35c3b24d38 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.234075] env[63379]: DEBUG oslo_vmware.api [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]527709a7-b4c0-10e7-fb7b-1bd0bc241c96, 'name': SearchDatastore_Task, 'duration_secs': 0.011807} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1676.235082] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0aaea575-d5d0-43f4-8013-919e60aafde7 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1676.235333] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0aaea575-d5d0-43f4-8013-919e60aafde7 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1676.235579] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-0aaea575-d5d0-43f4-8013-919e60aafde7 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Deleting the datastore file [datastore1] f087b3ac-13e2-4e55-a3ce-5e6bd3379239 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1676.236189] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d248d92-8e3b-4571-9f98-965e0e6920cf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.238492] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d71ae47e-a7e2-4d8d-a3f2-4a27f46f2729 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.245179] env[63379]: DEBUG oslo_vmware.api [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1676.245179] env[63379]: value = "task-1779753" [ 1676.245179] env[63379]: _type = "Task" [ 1676.245179] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1676.251178] env[63379]: DEBUG oslo_vmware.api [None req-0aaea575-d5d0-43f4-8013-919e60aafde7 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Waiting for the task: (returnval){ [ 1676.251178] env[63379]: value = "task-1779754" [ 1676.251178] env[63379]: _type = "Task" [ 1676.251178] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1676.251531] env[63379]: DEBUG oslo_vmware.api [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Waiting for the task: (returnval){ [ 1676.251531] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52de18d9-c763-f64c-5bb1-49ef7430e3c6" [ 1676.251531] env[63379]: _type = "Task" [ 1676.251531] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1676.261015] env[63379]: DEBUG oslo_vmware.api [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779753, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.266345] env[63379]: DEBUG oslo_vmware.api [None req-0aaea575-d5d0-43f4-8013-919e60aafde7 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Task: {'id': task-1779754, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.269512] env[63379]: DEBUG oslo_vmware.api [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52de18d9-c763-f64c-5bb1-49ef7430e3c6, 'name': SearchDatastore_Task, 'duration_secs': 0.010761} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1676.269928] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1676.270013] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] da66c3d9-ca03-4113-8703-64b666628936/da66c3d9-ca03-4113-8703-64b666628936.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1676.270283] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a7bb67e5-2ad6-405e-a932-d228259a92a3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.276878] env[63379]: DEBUG oslo_vmware.api [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Waiting for the task: (returnval){ [ 1676.276878] env[63379]: value = "task-1779755" [ 1676.276878] env[63379]: _type = "Task" [ 1676.276878] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1676.285189] env[63379]: DEBUG oslo_vmware.api [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779755, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.428973] env[63379]: INFO nova.compute.manager [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Took 39.80 seconds to build instance. 
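Most of the remaining DEBUG chatter in this stretch ("Acquiring lock ...", "Lock ... acquired ... :: waited", "Lock ... \"released\" ... :: held") is emitted by oslo.concurrency's lockutils, which Nova uses to serialize work such as the `refresh_cache-<instance-uuid>` network-cache updates and the datastore image-cache access logged here. A minimal sketch of the two forms that produce those lines follows; the lock names are illustrative rather than Nova's exact derivations.

```python
# Minimal sketch of the oslo.concurrency locking forms behind the
# "Acquiring lock ... / acquired ... waited / released ... held" DEBUG lines.
from oslo_concurrency import lockutils

# Decorator form: the wrapper ("inner" in the log's lockutils.py references)
# logs how long the caller waited for, and then held, the semaphore.
@lockutils.synchronized('refresh_cache-<instance-uuid>')  # illustrative lock name
def refresh_instance_network_cache(instance_uuid):
    # critical section: only one thread per worker refreshes this
    # instance's network info cache at a time
    return instance_uuid

# Context-manager form, seen as the "lock ... lockutils.py:310/313/331" entries.
with lockutils.lock('[datastore1] devstack-image-cache_base'):
    pass  # work that must not race with other users of the image cache
```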
[ 1676.452456] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Releasing lock "refresh_cache-7687aaa1-d1a0-4d0d-a6b4-47c454fe3655" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1676.452684] env[63379]: DEBUG nova.compute.manager [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Instance network_info: |[{"id": "9b6c5265-06a3-4c47-b5cf-a656af402fa9", "address": "fa:16:3e:fe:de:da", "network": {"id": "393f09d9-160a-48f1-acdf-cf2e43117ed7", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-834514240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "50144e7fcb0642d7a1d1514f2233f555", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b6c5265-06", "ovs_interfaceid": "9b6c5265-06a3-4c47-b5cf-a656af402fa9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1676.453107] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fe:de:da', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b80dd748-3d7e-4a23-a38d-9e79a3881452', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9b6c5265-06a3-4c47-b5cf-a656af402fa9', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1676.462027] env[63379]: DEBUG oslo.service.loopingcall [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1676.462027] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1676.462027] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-89bf14f5-32c5-4626-b1be-a389797f3ad5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.478717] env[63379]: DEBUG nova.network.neutron [req-848e8134-5652-4701-bb3c-74779e8b0c4b req-0d1be9fe-a6b3-4d52-bac5-82a8d0114acd service nova] [instance: da66c3d9-ca03-4113-8703-64b666628936] Updated VIF entry in instance network info cache for port 7a48c3ef-9850-43b6-b138-d7cbb329face. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1676.479089] env[63379]: DEBUG nova.network.neutron [req-848e8134-5652-4701-bb3c-74779e8b0c4b req-0d1be9fe-a6b3-4d52-bac5-82a8d0114acd service nova] [instance: da66c3d9-ca03-4113-8703-64b666628936] Updating instance_info_cache with network_info: [{"id": "7a48c3ef-9850-43b6-b138-d7cbb329face", "address": "fa:16:3e:d8:c3:c8", "network": {"id": "832e4609-8371-4d4b-8cfc-8a38039d24b7", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1517956996-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "c3562bb229474ba7aa3dae98def05260", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f267bcdd-0daa-4337-9709-5fc060c267d8", "external-id": "nsx-vlan-transportzone-308", "segmentation_id": 308, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a48c3ef-98", "ovs_interfaceid": "7a48c3ef-9850-43b6-b138-d7cbb329face", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1676.485463] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1676.485463] env[63379]: value = "task-1779756" [ 1676.485463] env[63379]: _type = "Task" [ 1676.485463] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1676.486317] env[63379]: DEBUG nova.network.neutron [req-2391257e-ae99-492c-bc35-d744fcbb0297 req-854ef2c4-694d-44ff-8a3a-5d975d42cd60 service nova] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Updated VIF entry in instance network info cache for port ae27d114-783b-4d6d-89ea-22959da9b86f. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1676.486709] env[63379]: DEBUG nova.network.neutron [req-2391257e-ae99-492c-bc35-d744fcbb0297 req-854ef2c4-694d-44ff-8a3a-5d975d42cd60 service nova] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Updating instance_info_cache with network_info: [{"id": "ae27d114-783b-4d6d-89ea-22959da9b86f", "address": "fa:16:3e:2a:2d:02", "network": {"id": "f746cc0e-3c0e-4c9c-b2fc-2e87ec1838e1", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1847805430-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "b124411aac0544d6834ff8f5c2b84bd5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "748a5204-8f14-402c-9a6e-f3e6104db082", "external-id": "nsx-vlan-transportzone-750", "segmentation_id": 750, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae27d114-78", "ovs_interfaceid": "ae27d114-783b-4d6d-89ea-22959da9b86f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1676.497239] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779756, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.632212] env[63379]: DEBUG oslo_vmware.api [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Task: {'id': task-1779751, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.707357] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63379) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1676.707597] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 5.133s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1676.707900] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 31.054s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1676.708108] env[63379]: DEBUG nova.objects.instance [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63379) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1676.759909] env[63379]: DEBUG oslo_vmware.api [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779753, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.201072} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1676.761915] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1676.761915] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1676.762185] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1676.768858] env[63379]: DEBUG oslo_vmware.api [None req-0aaea575-d5d0-43f4-8013-919e60aafde7 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Task: {'id': task-1779754, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.320005} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1676.769280] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-0aaea575-d5d0-43f4-8013-919e60aafde7 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1676.769470] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0aaea575-d5d0-43f4-8013-919e60aafde7 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1676.769654] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0aaea575-d5d0-43f4-8013-919e60aafde7 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1676.769844] env[63379]: INFO nova.compute.manager [None req-0aaea575-d5d0-43f4-8013-919e60aafde7 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Took 1.21 seconds to destroy the instance on the hypervisor. [ 1676.770303] env[63379]: DEBUG oslo.service.loopingcall [None req-0aaea575-d5d0-43f4-8013-919e60aafde7 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1676.770303] env[63379]: DEBUG nova.compute.manager [-] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1676.770629] env[63379]: DEBUG nova.network.neutron [-] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1676.786463] env[63379]: DEBUG oslo_vmware.api [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779755, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.931350] env[63379]: DEBUG oslo_concurrency.lockutils [None req-21a7a79a-8389-48dc-a407-710a09a89064 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.310s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1676.982250] env[63379]: DEBUG oslo_concurrency.lockutils [req-848e8134-5652-4701-bb3c-74779e8b0c4b req-0d1be9fe-a6b3-4d52-bac5-82a8d0114acd service nova] Releasing lock "refresh_cache-da66c3d9-ca03-4113-8703-64b666628936" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1676.993900] env[63379]: DEBUG oslo_concurrency.lockutils [req-2391257e-ae99-492c-bc35-d744fcbb0297 req-854ef2c4-694d-44ff-8a3a-5d975d42cd60 service nova] Releasing lock "refresh_cache-f087b3ac-13e2-4e55-a3ce-5e6bd3379239" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1676.994965] env[63379]: DEBUG nova.compute.manager [req-2391257e-ae99-492c-bc35-d744fcbb0297 req-854ef2c4-694d-44ff-8a3a-5d975d42cd60 service nova] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Received event network-changed-ae27d114-783b-4d6d-89ea-22959da9b86f {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1676.996915] env[63379]: DEBUG nova.compute.manager [req-2391257e-ae99-492c-bc35-d744fcbb0297 req-854ef2c4-694d-44ff-8a3a-5d975d42cd60 service nova] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Refreshing instance network info cache due to event network-changed-ae27d114-783b-4d6d-89ea-22959da9b86f. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1676.996915] env[63379]: DEBUG oslo_concurrency.lockutils [req-2391257e-ae99-492c-bc35-d744fcbb0297 req-854ef2c4-694d-44ff-8a3a-5d975d42cd60 service nova] Acquiring lock "refresh_cache-f087b3ac-13e2-4e55-a3ce-5e6bd3379239" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1676.996915] env[63379]: DEBUG oslo_concurrency.lockutils [req-2391257e-ae99-492c-bc35-d744fcbb0297 req-854ef2c4-694d-44ff-8a3a-5d975d42cd60 service nova] Acquired lock "refresh_cache-f087b3ac-13e2-4e55-a3ce-5e6bd3379239" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1676.996915] env[63379]: DEBUG nova.network.neutron [req-2391257e-ae99-492c-bc35-d744fcbb0297 req-854ef2c4-694d-44ff-8a3a-5d975d42cd60 service nova] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Refreshing network info cache for port ae27d114-783b-4d6d-89ea-22959da9b86f {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1677.005690] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779756, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.130972] env[63379]: DEBUG oslo_vmware.api [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Task: {'id': task-1779751, 'name': PowerOnVM_Task, 'duration_secs': 0.630748} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1677.131357] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] [instance: 266cc3d5-c10d-4367-a879-d170802495db] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1677.131564] env[63379]: INFO nova.compute.manager [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] [instance: 266cc3d5-c10d-4367-a879-d170802495db] Took 8.76 seconds to spawn the instance on the hypervisor. [ 1677.131747] env[63379]: DEBUG nova.compute.manager [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] [instance: 266cc3d5-c10d-4367-a879-d170802495db] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1677.132657] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c5510f1-7f67-49e4-8376-4ba37eb6c5b5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.158436] env[63379]: DEBUG nova.compute.manager [req-529f40b7-241b-4b91-84d8-f4c8d745d18b req-c02f2813-343b-4da7-a0c2-c5ed527d5f2c service nova] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Received event network-vif-deleted-ae27d114-783b-4d6d-89ea-22959da9b86f {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1677.158652] env[63379]: INFO nova.compute.manager [req-529f40b7-241b-4b91-84d8-f4c8d745d18b req-c02f2813-343b-4da7-a0c2-c5ed527d5f2c service nova] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Neutron deleted interface ae27d114-783b-4d6d-89ea-22959da9b86f; detaching it from the instance and deleting it from the info cache [ 1677.158824] env[63379]: DEBUG nova.network.neutron [req-529f40b7-241b-4b91-84d8-f4c8d745d18b req-c02f2813-343b-4da7-a0c2-c5ed527d5f2c service nova] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1677.287203] env[63379]: DEBUG oslo_vmware.api [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779755, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.570103} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1677.287203] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] da66c3d9-ca03-4113-8703-64b666628936/da66c3d9-ca03-4113-8703-64b666628936.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1677.287203] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1677.287470] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0ba89ff4-a97c-4370-9071-355a3321ff25 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.294246] env[63379]: DEBUG oslo_vmware.api [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Waiting for the task: (returnval){ [ 1677.294246] env[63379]: value = "task-1779757" [ 1677.294246] env[63379]: _type = "Task" [ 1677.294246] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1677.302349] env[63379]: DEBUG oslo_vmware.api [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779757, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.500761] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779756, 'name': CreateVM_Task, 'duration_secs': 0.726071} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1677.500934] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1677.501864] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1677.502048] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1677.502386] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1677.502673] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae98eaad-370c-4bcc-b74b-865da3926d35 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.507209] env[63379]: DEBUG oslo_vmware.api [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Waiting for the task: (returnval){ [ 1677.507209] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]527d8df5-1376-2cab-5f9d-02ab4f48494b" [ 1677.507209] env[63379]: _type = "Task" [ 1677.507209] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1677.515349] env[63379]: DEBUG oslo_vmware.api [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]527d8df5-1376-2cab-5f9d-02ab4f48494b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.516102] env[63379]: INFO nova.network.neutron [req-2391257e-ae99-492c-bc35-d744fcbb0297 req-854ef2c4-694d-44ff-8a3a-5d975d42cd60 service nova] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Port ae27d114-783b-4d6d-89ea-22959da9b86f from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
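The repeated Acquiring lock / Acquired lock ("waited Ns") / Releasing lock entries around "[datastore1] devstack-image-cache_base/..." come from oslo.concurrency named locks that serialize access to the cached base image. A minimal sketch of that usage pattern, under the assumption that a copy helper is passed in; lockutils.lock() is the real oslo.concurrency context manager, everything else here is a simplified placeholder rather than Nova's actual code.

from oslo_concurrency import lockutils

def copy_cached_image(ds_copy, cache_vmdk_path, target_vmdk_path):
    # Serialize on the cached image path so concurrent instance builds do not
    # race while the base VMDK is being fetched or copied. Entering the
    # context manager matches the 'Acquiring lock "..."' / 'Acquired lock'
    # log lines; leaving it matches 'Releasing lock "..."'.
    with lockutils.lock(cache_vmdk_path):
        ds_copy(cache_vmdk_path, target_vmdk_path)  # placeholder copy step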
[ 1677.516324] env[63379]: DEBUG nova.network.neutron [req-2391257e-ae99-492c-bc35-d744fcbb0297 req-854ef2c4-694d-44ff-8a3a-5d975d42cd60 service nova] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1677.559438] env[63379]: DEBUG nova.network.neutron [-] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1677.650898] env[63379]: INFO nova.compute.manager [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] [instance: 266cc3d5-c10d-4367-a879-d170802495db] Took 40.91 seconds to build instance. [ 1677.661869] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-31d9d605-bdaf-4d32-856c-834a05f19183 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.671985] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0465d7b-28e0-4aa9-8c3f-cdeb42f76292 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.707665] env[63379]: DEBUG nova.compute.manager [req-529f40b7-241b-4b91-84d8-f4c8d745d18b req-c02f2813-343b-4da7-a0c2-c5ed527d5f2c service nova] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Detach interface failed, port_id=ae27d114-783b-4d6d-89ea-22959da9b86f, reason: Instance f087b3ac-13e2-4e55-a3ce-5e6bd3379239 could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 1677.719982] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f4ab3ea6-043f-4d1c-a800-765775ba880d tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1677.721074] env[63379]: DEBUG oslo_concurrency.lockutils [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 28.071s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1677.721271] env[63379]: DEBUG nova.objects.instance [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63379) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1677.800563] env[63379]: DEBUG nova.virt.hardware [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1677.800819] env[63379]: DEBUG nova.virt.hardware [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1677.800981] env[63379]: DEBUG nova.virt.hardware [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1677.801187] env[63379]: DEBUG nova.virt.hardware [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1677.801341] env[63379]: DEBUG nova.virt.hardware [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1677.801489] env[63379]: DEBUG nova.virt.hardware [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1677.801694] env[63379]: DEBUG nova.virt.hardware [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1677.801857] env[63379]: DEBUG nova.virt.hardware [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1677.802036] env[63379]: DEBUG nova.virt.hardware [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1677.802209] env[63379]: 
DEBUG nova.virt.hardware [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1677.802383] env[63379]: DEBUG nova.virt.hardware [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1677.803212] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4a8612b-f525-4e7c-8ecf-fdaffd1fcd97 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.810529] env[63379]: DEBUG oslo_vmware.api [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779757, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.288628} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1677.812568] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1677.813280] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-716120f4-8800-439c-aa99-be42c488a482 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.816717] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0956262-29d3-48fd-9c65-6eece42e84de {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.847727] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Reconfiguring VM instance instance-00000049 to attach disk [datastore1] da66c3d9-ca03-4113-8703-64b666628936/da66c3d9-ca03-4113-8703-64b666628936.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1677.848171] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c9:b9:0a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8f441782-e89c-4815-b53e-af83c5d27902', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1a119dbf-427b-4b34-819c-d65a9f0f88a8', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1677.855193] env[63379]: DEBUG oslo.service.loopingcall [None 
req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1677.855512] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ac803331-a249-420c-b8c0-99dcdf76c5ba {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.869576] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1677.869734] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-76d9580a-aff1-46fd-97eb-ed9668d98ab6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.888638] env[63379]: DEBUG oslo_vmware.api [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Waiting for the task: (returnval){ [ 1677.888638] env[63379]: value = "task-1779759" [ 1677.888638] env[63379]: _type = "Task" [ 1677.888638] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1677.889724] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1677.889724] env[63379]: value = "task-1779758" [ 1677.889724] env[63379]: _type = "Task" [ 1677.889724] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1677.899810] env[63379]: DEBUG oslo_vmware.api [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779759, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.902489] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779758, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.019057] env[63379]: DEBUG oslo_concurrency.lockutils [req-2391257e-ae99-492c-bc35-d744fcbb0297 req-854ef2c4-694d-44ff-8a3a-5d975d42cd60 service nova] Releasing lock "refresh_cache-f087b3ac-13e2-4e55-a3ce-5e6bd3379239" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1678.019434] env[63379]: DEBUG nova.compute.manager [req-2391257e-ae99-492c-bc35-d744fcbb0297 req-854ef2c4-694d-44ff-8a3a-5d975d42cd60 service nova] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Received event network-vif-plugged-9b6c5265-06a3-4c47-b5cf-a656af402fa9 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1678.019838] env[63379]: DEBUG oslo_concurrency.lockutils [req-2391257e-ae99-492c-bc35-d744fcbb0297 req-854ef2c4-694d-44ff-8a3a-5d975d42cd60 service nova] Acquiring lock "7687aaa1-d1a0-4d0d-a6b4-47c454fe3655-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1678.019838] env[63379]: DEBUG oslo_concurrency.lockutils [req-2391257e-ae99-492c-bc35-d744fcbb0297 req-854ef2c4-694d-44ff-8a3a-5d975d42cd60 service nova] Lock "7687aaa1-d1a0-4d0d-a6b4-47c454fe3655-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1678.020014] env[63379]: DEBUG oslo_concurrency.lockutils [req-2391257e-ae99-492c-bc35-d744fcbb0297 req-854ef2c4-694d-44ff-8a3a-5d975d42cd60 service nova] Lock "7687aaa1-d1a0-4d0d-a6b4-47c454fe3655-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1678.020200] env[63379]: DEBUG nova.compute.manager [req-2391257e-ae99-492c-bc35-d744fcbb0297 req-854ef2c4-694d-44ff-8a3a-5d975d42cd60 service nova] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] No waiting events found dispatching network-vif-plugged-9b6c5265-06a3-4c47-b5cf-a656af402fa9 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1678.020379] env[63379]: WARNING nova.compute.manager [req-2391257e-ae99-492c-bc35-d744fcbb0297 req-854ef2c4-694d-44ff-8a3a-5d975d42cd60 service nova] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Received unexpected event network-vif-plugged-9b6c5265-06a3-4c47-b5cf-a656af402fa9 for instance with vm_state building and task_state spawning. [ 1678.020548] env[63379]: DEBUG nova.compute.manager [req-2391257e-ae99-492c-bc35-d744fcbb0297 req-854ef2c4-694d-44ff-8a3a-5d975d42cd60 service nova] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Received event network-changed-9b6c5265-06a3-4c47-b5cf-a656af402fa9 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1678.020708] env[63379]: DEBUG nova.compute.manager [req-2391257e-ae99-492c-bc35-d744fcbb0297 req-854ef2c4-694d-44ff-8a3a-5d975d42cd60 service nova] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Refreshing instance network info cache due to event network-changed-9b6c5265-06a3-4c47-b5cf-a656af402fa9. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1678.020890] env[63379]: DEBUG oslo_concurrency.lockutils [req-2391257e-ae99-492c-bc35-d744fcbb0297 req-854ef2c4-694d-44ff-8a3a-5d975d42cd60 service nova] Acquiring lock "refresh_cache-7687aaa1-d1a0-4d0d-a6b4-47c454fe3655" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1678.021049] env[63379]: DEBUG oslo_concurrency.lockutils [req-2391257e-ae99-492c-bc35-d744fcbb0297 req-854ef2c4-694d-44ff-8a3a-5d975d42cd60 service nova] Acquired lock "refresh_cache-7687aaa1-d1a0-4d0d-a6b4-47c454fe3655" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1678.021217] env[63379]: DEBUG nova.network.neutron [req-2391257e-ae99-492c-bc35-d744fcbb0297 req-854ef2c4-694d-44ff-8a3a-5d975d42cd60 service nova] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Refreshing network info cache for port 9b6c5265-06a3-4c47-b5cf-a656af402fa9 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1678.022553] env[63379]: DEBUG oslo_vmware.api [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]527d8df5-1376-2cab-5f9d-02ab4f48494b, 'name': SearchDatastore_Task, 'duration_secs': 0.072655} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1678.023240] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1678.023504] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1678.023767] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1678.023973] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1678.024191] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 
tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1678.024485] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bf9e1750-790b-4561-91f6-6439b7c89f25 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.035886] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1678.036200] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1678.036935] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5223bd5b-42f5-4047-a8f7-40a02f342077 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.043193] env[63379]: DEBUG oslo_vmware.api [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Waiting for the task: (returnval){ [ 1678.043193] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a3aebc-7f72-f3ea-dcfa-7ebef9397d9e" [ 1678.043193] env[63379]: _type = "Task" [ 1678.043193] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1678.050796] env[63379]: DEBUG oslo_vmware.api [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a3aebc-7f72-f3ea-dcfa-7ebef9397d9e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.060659] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9583064e-1fcd-40f7-9e14-4102db43a548 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1678.060901] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9583064e-1fcd-40f7-9e14-4102db43a548 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1678.064232] env[63379]: INFO nova.compute.manager [-] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Took 1.29 seconds to deallocate network for instance. [ 1678.153154] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9b93a93b-dd73-480f-852a-2f0d05fc74f3 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Lock "266cc3d5-c10d-4367-a879-d170802495db" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.421s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1678.402396] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779758, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.405990] env[63379]: DEBUG oslo_vmware.api [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779759, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.554421] env[63379]: DEBUG oslo_vmware.api [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a3aebc-7f72-f3ea-dcfa-7ebef9397d9e, 'name': SearchDatastore_Task, 'duration_secs': 0.05297} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1678.555280] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8200c567-e322-4256-a366-763c7b0597fe {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.563489] env[63379]: DEBUG oslo_vmware.api [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Waiting for the task: (returnval){ [ 1678.563489] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5285bdc4-1918-a180-7afc-7153ecc682de" [ 1678.563489] env[63379]: _type = "Task" [ 1678.563489] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1678.567512] env[63379]: DEBUG nova.compute.utils [None req-9583064e-1fcd-40f7-9e14-4102db43a548 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1678.570066] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0aaea575-d5d0-43f4-8013-919e60aafde7 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1678.576558] env[63379]: DEBUG oslo_vmware.api [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5285bdc4-1918-a180-7afc-7153ecc682de, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.731064] env[63379]: DEBUG oslo_concurrency.lockutils [None req-dd989f7b-c43b-4359-9aef-3a2bbc8f5c45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.010s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1678.735150] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8452693d-f0e2-4485-8ee9-46a4f6490094 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.759s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1678.735369] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8452693d-f0e2-4485-8ee9-46a4f6490094 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1678.737538] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e82d24e3-5aa1-4854-a652-599af3f01d0c tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.838s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1678.737773] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e82d24e3-5aa1-4854-a652-599af3f01d0c tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1678.739464] env[63379]: DEBUG oslo_concurrency.lockutils [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 
tempest-ImagesTestJSON-516714316-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.518s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1678.741093] env[63379]: INFO nova.compute.claims [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1678.765600] env[63379]: INFO nova.scheduler.client.report [None req-8452693d-f0e2-4485-8ee9-46a4f6490094 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Deleted allocations for instance b91a5b89-0456-431d-b099-adda3a6b3024 [ 1678.770420] env[63379]: INFO nova.scheduler.client.report [None req-e82d24e3-5aa1-4854-a652-599af3f01d0c tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Deleted allocations for instance 5aad86f8-0b3b-43ca-982b-c670e3411c01 [ 1678.791442] env[63379]: DEBUG nova.network.neutron [req-2391257e-ae99-492c-bc35-d744fcbb0297 req-854ef2c4-694d-44ff-8a3a-5d975d42cd60 service nova] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Updated VIF entry in instance network info cache for port 9b6c5265-06a3-4c47-b5cf-a656af402fa9. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1678.791816] env[63379]: DEBUG nova.network.neutron [req-2391257e-ae99-492c-bc35-d744fcbb0297 req-854ef2c4-694d-44ff-8a3a-5d975d42cd60 service nova] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Updating instance_info_cache with network_info: [{"id": "9b6c5265-06a3-4c47-b5cf-a656af402fa9", "address": "fa:16:3e:fe:de:da", "network": {"id": "393f09d9-160a-48f1-acdf-cf2e43117ed7", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-834514240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "50144e7fcb0642d7a1d1514f2233f555", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b6c5265-06", "ovs_interfaceid": "9b6c5265-06a3-4c47-b5cf-a656af402fa9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1678.903091] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779758, 'name': CreateVM_Task, 'duration_secs': 0.687729} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1678.905711] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1678.906015] env[63379]: DEBUG oslo_vmware.api [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779759, 'name': ReconfigVM_Task, 'duration_secs': 0.681561} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1678.906676] env[63379]: DEBUG oslo_concurrency.lockutils [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1678.906842] env[63379]: DEBUG oslo_concurrency.lockutils [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1678.907194] env[63379]: DEBUG oslo_concurrency.lockutils [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1678.907517] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Reconfigured VM instance instance-00000049 to attach disk [datastore1] da66c3d9-ca03-4113-8703-64b666628936/da66c3d9-ca03-4113-8703-64b666628936.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1678.908139] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-533b3015-ecd3-4e8d-88ce-d4b5dd472e6d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.909559] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-380d0b77-ecf9-4c0b-86e0-88b6579b7591 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.913998] env[63379]: DEBUG oslo_vmware.api [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1678.913998] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52afb5d1-17cf-78e4-0b8d-190a43d38949" [ 1678.913998] env[63379]: _type = "Task" [ 1678.913998] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1678.918121] env[63379]: DEBUG oslo_vmware.api [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Waiting for the task: (returnval){ [ 1678.918121] env[63379]: value = "task-1779760" [ 1678.918121] env[63379]: _type = "Task" [ 1678.918121] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1678.923493] env[63379]: DEBUG oslo_vmware.api [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52afb5d1-17cf-78e4-0b8d-190a43d38949, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.928331] env[63379]: DEBUG oslo_vmware.api [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779760, 'name': Rename_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.074611] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9583064e-1fcd-40f7-9e14-4102db43a548 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.013s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1679.075196] env[63379]: DEBUG oslo_vmware.api [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5285bdc4-1918-a180-7afc-7153ecc682de, 'name': SearchDatastore_Task, 'duration_secs': 0.020365} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1679.075483] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1679.075731] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655/7687aaa1-d1a0-4d0d-a6b4-47c454fe3655.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1679.076646] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-445ae022-ecca-4894-8a80-6e774f6ec685 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.083553] env[63379]: DEBUG oslo_vmware.api [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Waiting for the task: (returnval){ [ 1679.083553] env[63379]: value = "task-1779761" [ 1679.083553] env[63379]: _type = "Task" [ 1679.083553] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1679.092612] env[63379]: DEBUG oslo_vmware.api [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779761, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.274761] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8452693d-f0e2-4485-8ee9-46a4f6490094 tempest-ServerShowV257Test-2050319880 tempest-ServerShowV257Test-2050319880-project-member] Lock "b91a5b89-0456-431d-b099-adda3a6b3024" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.990s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1679.277848] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e82d24e3-5aa1-4854-a652-599af3f01d0c tempest-ServerAddressesTestJSON-287070463 tempest-ServerAddressesTestJSON-287070463-project-member] Lock "5aad86f8-0b3b-43ca-982b-c670e3411c01" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.808s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1679.295130] env[63379]: DEBUG oslo_concurrency.lockutils [req-2391257e-ae99-492c-bc35-d744fcbb0297 req-854ef2c4-694d-44ff-8a3a-5d975d42cd60 service nova] Releasing lock "refresh_cache-7687aaa1-d1a0-4d0d-a6b4-47c454fe3655" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1679.433025] env[63379]: DEBUG oslo_vmware.api [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779760, 'name': Rename_Task, 'duration_secs': 0.23821} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1679.433323] env[63379]: DEBUG oslo_vmware.api [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52afb5d1-17cf-78e4-0b8d-190a43d38949, 'name': SearchDatastore_Task, 'duration_secs': 0.01807} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1679.433580] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1679.433909] env[63379]: DEBUG oslo_concurrency.lockutils [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1679.434219] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1679.434469] env[63379]: DEBUG oslo_concurrency.lockutils [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1679.434622] env[63379]: DEBUG oslo_concurrency.lockutils [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1679.434805] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1679.435111] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-76b52822-c344-47e6-9566-57f28520b56e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.437048] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5d3ad07f-42e2-4735-9a7d-7f18df629226 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.446078] env[63379]: DEBUG oslo_vmware.api [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Waiting for the task: (returnval){ [ 1679.446078] env[63379]: value = "task-1779762" [ 1679.446078] env[63379]: _type = "Task" [ 1679.446078] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1679.451027] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1679.451283] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1679.452446] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e21dc57c-97d0-4a03-8ffb-7f6de865854d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.457996] env[63379]: DEBUG oslo_vmware.api [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779762, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.462224] env[63379]: DEBUG oslo_vmware.api [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1679.462224] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52ca8111-53f0-7334-2570-9aa552b26d0f" [ 1679.462224] env[63379]: _type = "Task" [ 1679.462224] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1679.472987] env[63379]: DEBUG oslo_vmware.api [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52ca8111-53f0-7334-2570-9aa552b26d0f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.594153] env[63379]: DEBUG oslo_vmware.api [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779761, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.957961] env[63379]: DEBUG oslo_vmware.api [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779762, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.974449] env[63379]: DEBUG oslo_vmware.api [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52ca8111-53f0-7334-2570-9aa552b26d0f, 'name': SearchDatastore_Task, 'duration_secs': 0.073245} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1679.975745] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0b6816a-e0ee-406b-866f-d773e39f1707 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.984825] env[63379]: DEBUG oslo_vmware.api [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1679.984825] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52915419-5672-176f-43b4-0f48816db9ce" [ 1679.984825] env[63379]: _type = "Task" [ 1679.984825] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1679.992515] env[63379]: DEBUG oslo_vmware.api [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52915419-5672-176f-43b4-0f48816db9ce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.096207] env[63379]: DEBUG oslo_vmware.api [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779761, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.583186} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1680.096610] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655/7687aaa1-d1a0-4d0d-a6b4-47c454fe3655.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1680.096734] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1680.096990] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-09f02992-2384-413f-bad7-70621760639d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.104977] env[63379]: DEBUG oslo_vmware.api [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Waiting for the task: (returnval){ [ 1680.104977] env[63379]: value = "task-1779763" [ 1680.104977] env[63379]: _type = "Task" [ 1680.104977] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.115520] env[63379]: DEBUG oslo_vmware.api [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779763, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.140512] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9583064e-1fcd-40f7-9e14-4102db43a548 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1680.140828] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9583064e-1fcd-40f7-9e14-4102db43a548 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1680.141076] env[63379]: INFO nova.compute.manager [None req-9583064e-1fcd-40f7-9e14-4102db43a548 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Attaching volume 136f5b73-3e46-4a97-a860-2727b3e8d24e to /dev/sdb [ 1680.181987] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4f81959-e9da-42a9-9bb8-d67cfaa8ea79 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.186591] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71bc5c90-c1f5-4016-ad9b-18d4f5577e6e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.199549] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d6b735c-f6dc-4208-8cd3-41b6df12b093 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.204271] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e898abe1-043c-439a-ae21-d18716b0ad2d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.239665] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99bb698a-ef24-471c-8ab9-53c53cab15b9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.242615] env[63379]: DEBUG nova.virt.block_device [None req-9583064e-1fcd-40f7-9e14-4102db43a548 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Updating existing volume attachment record: 91a06e83-247d-4598-8de0-a8c694e7e1ac {{(pid=63379) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1680.250770] env[63379]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e4d2097-ef03-40e2-9bef-f50ead802bcd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.265269] env[63379]: DEBUG nova.compute.provider_tree [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1680.461830] env[63379]: DEBUG oslo_vmware.api [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779762, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.495349] env[63379]: DEBUG oslo_vmware.api [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52915419-5672-176f-43b4-0f48816db9ce, 'name': SearchDatastore_Task, 'duration_secs': 0.026343} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1680.495618] env[63379]: DEBUG oslo_concurrency.lockutils [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1680.495869] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] c1858f41-75e7-4eee-a6db-493e150622ef/c1858f41-75e7-4eee-a6db-493e150622ef.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1680.496372] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-637265a3-59ac-49c6-a402-8e8d907c2b89 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.502626] env[63379]: DEBUG oslo_vmware.api [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1680.502626] env[63379]: value = "task-1779765" [ 1680.502626] env[63379]: _type = "Task" [ 1680.502626] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.515493] env[63379]: DEBUG oslo_vmware.api [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779765, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.616201] env[63379]: DEBUG oslo_vmware.api [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779763, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067591} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1680.616511] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1680.617455] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d47fcbe3-486f-44e5-8a9b-3072f208570a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.640957] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Reconfiguring VM instance instance-0000004a to attach disk [datastore1] 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655/7687aaa1-d1a0-4d0d-a6b4-47c454fe3655.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1680.641344] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e784b05d-34ac-4d37-b818-0de07b9f852b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.663423] env[63379]: DEBUG oslo_vmware.api [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Waiting for the task: (returnval){ [ 1680.663423] env[63379]: value = "task-1779766" [ 1680.663423] env[63379]: _type = "Task" [ 1680.663423] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.671736] env[63379]: DEBUG oslo_vmware.api [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779766, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.770033] env[63379]: DEBUG nova.scheduler.client.report [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1680.935219] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3ef7cc05-7eb0-401b-8b5c-16594cf9cc06 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Acquiring lock "266cc3d5-c10d-4367-a879-d170802495db" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1680.935553] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3ef7cc05-7eb0-401b-8b5c-16594cf9cc06 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Lock "266cc3d5-c10d-4367-a879-d170802495db" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1680.935714] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3ef7cc05-7eb0-401b-8b5c-16594cf9cc06 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Acquiring lock "266cc3d5-c10d-4367-a879-d170802495db-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1680.935914] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3ef7cc05-7eb0-401b-8b5c-16594cf9cc06 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Lock "266cc3d5-c10d-4367-a879-d170802495db-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1680.936237] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3ef7cc05-7eb0-401b-8b5c-16594cf9cc06 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Lock "266cc3d5-c10d-4367-a879-d170802495db-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1680.940370] env[63379]: INFO nova.compute.manager [None req-3ef7cc05-7eb0-401b-8b5c-16594cf9cc06 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] [instance: 266cc3d5-c10d-4367-a879-d170802495db] Terminating instance [ 1680.942863] env[63379]: DEBUG nova.compute.manager [None req-3ef7cc05-7eb0-401b-8b5c-16594cf9cc06 
tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] [instance: 266cc3d5-c10d-4367-a879-d170802495db] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1680.943055] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-3ef7cc05-7eb0-401b-8b5c-16594cf9cc06 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] [instance: 266cc3d5-c10d-4367-a879-d170802495db] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1680.944017] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a403e615-87ba-4b09-8a32-888a78fb1ac2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.954054] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ef7cc05-7eb0-401b-8b5c-16594cf9cc06 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] [instance: 266cc3d5-c10d-4367-a879-d170802495db] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1680.954833] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f95c6f21-bfba-4a70-b952-8a71b03d8a02 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.960464] env[63379]: DEBUG oslo_vmware.api [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779762, 'name': PowerOnVM_Task, 'duration_secs': 1.244646} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1680.961922] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1680.962230] env[63379]: INFO nova.compute.manager [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Took 9.96 seconds to spawn the instance on the hypervisor. [ 1680.962358] env[63379]: DEBUG nova.compute.manager [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1680.962731] env[63379]: DEBUG oslo_vmware.api [None req-3ef7cc05-7eb0-401b-8b5c-16594cf9cc06 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Waiting for the task: (returnval){ [ 1680.962731] env[63379]: value = "task-1779769" [ 1680.962731] env[63379]: _type = "Task" [ 1680.962731] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.963491] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-568300d0-b951-48f2-b696-b59ff9769161 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.983046] env[63379]: DEBUG oslo_vmware.api [None req-3ef7cc05-7eb0-401b-8b5c-16594cf9cc06 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Task: {'id': task-1779769, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.013956] env[63379]: DEBUG oslo_vmware.api [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779765, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.174220] env[63379]: DEBUG oslo_vmware.api [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779766, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.275403] env[63379]: DEBUG oslo_concurrency.lockutils [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.536s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1681.276078] env[63379]: DEBUG nova.compute.manager [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1681.278673] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b62c5475-063e-43b5-bced-13bc1ea14379 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.171s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1681.278877] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b62c5475-063e-43b5-bced-13bc1ea14379 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1681.280900] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5f809585-762e-4449-97c5-07a91f390322 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.559s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1681.281201] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5f809585-762e-4449-97c5-07a91f390322 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1681.282807] env[63379]: DEBUG oslo_concurrency.lockutils [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 18.911s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1681.283031] env[63379]: DEBUG nova.objects.instance [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63379) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1681.312188] env[63379]: INFO nova.scheduler.client.report [None req-b62c5475-063e-43b5-bced-13bc1ea14379 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Deleted allocations for instance f082cdd7-228e-4100-b301-5af6daea9b36 [ 1681.316892] env[63379]: INFO nova.scheduler.client.report [None req-5f809585-762e-4449-97c5-07a91f390322 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Deleted allocations for instance f10fe64d-a09e-488a-b609-3e38922cf2e0 [ 1681.478784] env[63379]: DEBUG oslo_vmware.api [None req-3ef7cc05-7eb0-401b-8b5c-16594cf9cc06 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Task: {'id': task-1779769, 'name': PowerOffVM_Task, 'duration_secs': 0.219736} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1681.479077] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ef7cc05-7eb0-401b-8b5c-16594cf9cc06 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] [instance: 266cc3d5-c10d-4367-a879-d170802495db] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1681.479260] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-3ef7cc05-7eb0-401b-8b5c-16594cf9cc06 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] [instance: 266cc3d5-c10d-4367-a879-d170802495db] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1681.479514] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8ee9fbf3-44a2-497e-b856-833bfb7154ea {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.493026] env[63379]: INFO nova.compute.manager [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Took 40.38 seconds to build instance. [ 1681.515483] env[63379]: DEBUG oslo_vmware.api [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779765, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.633117} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1681.515789] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] c1858f41-75e7-4eee-a6db-493e150622ef/c1858f41-75e7-4eee-a6db-493e150622ef.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1681.516123] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1681.516415] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fe502b0b-59f7-4f7e-9011-ef8f43f53c01 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.525889] env[63379]: DEBUG oslo_vmware.api [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1681.525889] env[63379]: value = "task-1779771" [ 1681.525889] env[63379]: _type = "Task" [ 1681.525889] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1681.533530] env[63379]: DEBUG oslo_vmware.api [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779771, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.591956] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-3ef7cc05-7eb0-401b-8b5c-16594cf9cc06 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] [instance: 266cc3d5-c10d-4367-a879-d170802495db] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1681.592327] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-3ef7cc05-7eb0-401b-8b5c-16594cf9cc06 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] [instance: 266cc3d5-c10d-4367-a879-d170802495db] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1681.592591] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ef7cc05-7eb0-401b-8b5c-16594cf9cc06 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Deleting the datastore file [datastore1] 266cc3d5-c10d-4367-a879-d170802495db {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1681.592812] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-35a81192-53b9-4bce-a783-9b980694d046 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.602224] env[63379]: DEBUG oslo_vmware.api [None req-3ef7cc05-7eb0-401b-8b5c-16594cf9cc06 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Waiting for the task: (returnval){ [ 1681.602224] env[63379]: value = "task-1779772" [ 1681.602224] env[63379]: _type = "Task" [ 1681.602224] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1681.608498] env[63379]: DEBUG oslo_vmware.api [None req-3ef7cc05-7eb0-401b-8b5c-16594cf9cc06 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Task: {'id': task-1779772, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.675468] env[63379]: DEBUG oslo_vmware.api [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779766, 'name': ReconfigVM_Task, 'duration_secs': 0.811332} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1681.675786] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Reconfigured VM instance instance-0000004a to attach disk [datastore1] 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655/7687aaa1-d1a0-4d0d-a6b4-47c454fe3655.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1681.677329] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5d7937a1-7a12-4a26-ab0d-047cd1c50c95 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.683526] env[63379]: DEBUG oslo_vmware.api [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Waiting for the task: (returnval){ [ 1681.683526] env[63379]: value = "task-1779773" [ 1681.683526] env[63379]: _type = "Task" [ 1681.683526] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1681.690909] env[63379]: DEBUG oslo_vmware.api [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779773, 'name': Rename_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.788347] env[63379]: DEBUG nova.compute.utils [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1681.793979] env[63379]: DEBUG nova.compute.manager [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1681.794219] env[63379]: DEBUG nova.network.neutron [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1681.830945] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b62c5475-063e-43b5-bced-13bc1ea14379 tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Lock "f082cdd7-228e-4100-b301-5af6daea9b36" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.793s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1681.832322] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5f809585-762e-4449-97c5-07a91f390322 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "f10fe64d-a09e-488a-b609-3e38922cf2e0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.306s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1681.850063] env[63379]: DEBUG nova.policy [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9162483675d540dfb8551206627b50e7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '767980ba969142098ccbdf031f6e62a9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1681.995708] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4908fd33-98c7-493e-a36b-5a56b8fcfb40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Lock "da66c3d9-ca03-4113-8703-64b666628936" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.888s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1682.037451] env[63379]: DEBUG oslo_vmware.api [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779771, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075526} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1682.037945] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1682.038986] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64e8c7b1-9c0e-433d-aa73-8e49043f69d7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.062021] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] c1858f41-75e7-4eee-a6db-493e150622ef/c1858f41-75e7-4eee-a6db-493e150622ef.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1682.062141] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-412dd661-1eef-4642-8ec6-06e53620355e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.084011] env[63379]: DEBUG oslo_vmware.api [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1682.084011] env[63379]: value = "task-1779774" [ 1682.084011] env[63379]: _type = "Task" [ 1682.084011] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1682.092105] env[63379]: DEBUG oslo_vmware.api [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779774, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.110051] env[63379]: DEBUG oslo_vmware.api [None req-3ef7cc05-7eb0-401b-8b5c-16594cf9cc06 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Task: {'id': task-1779772, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.193866] env[63379]: DEBUG oslo_vmware.api [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779773, 'name': Rename_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.238890] env[63379]: DEBUG nova.network.neutron [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Successfully created port: 13196237-c6ec-4167-b9f2-5818ee2ad126 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1682.298850] env[63379]: DEBUG nova.compute.manager [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1682.302299] env[63379]: DEBUG oslo_concurrency.lockutils [None req-acd9b004-e7db-4b10-bcd0-3b085c1a7f6a tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.019s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1682.303389] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8ef4fd12-bb92-4260-b551-82621a44da83 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.027s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1682.303587] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8ef4fd12-bb92-4260-b551-82621a44da83 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1682.305623] env[63379]: DEBUG oslo_concurrency.lockutils [None req-564d0e68-17df-4c5b-ab32-1283a9919c5d tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.210s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1682.305791] env[63379]: DEBUG oslo_concurrency.lockutils [None req-564d0e68-17df-4c5b-ab32-1283a9919c5d tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1682.307555] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.745s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1682.309018] env[63379]: INFO nova.compute.claims [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 
tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1682.335351] env[63379]: INFO nova.scheduler.client.report [None req-564d0e68-17df-4c5b-ab32-1283a9919c5d tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Deleted allocations for instance acc8aa2f-41a8-4f06-8227-a1bae9c93f44 [ 1682.345313] env[63379]: INFO nova.scheduler.client.report [None req-8ef4fd12-bb92-4260-b551-82621a44da83 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Deleted allocations for instance 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f [ 1682.421800] env[63379]: INFO nova.compute.manager [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Rescuing [ 1682.422146] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Acquiring lock "refresh_cache-da66c3d9-ca03-4113-8703-64b666628936" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1682.422259] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Acquired lock "refresh_cache-da66c3d9-ca03-4113-8703-64b666628936" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1682.422464] env[63379]: DEBUG nova.network.neutron [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1682.449664] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3308a90b-85a3-4fdd-b00e-018db6d7ac5f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Acquiring lock "aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1682.451060] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3308a90b-85a3-4fdd-b00e-018db6d7ac5f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Lock "aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1682.451060] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3308a90b-85a3-4fdd-b00e-018db6d7ac5f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Acquiring lock "aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1682.451060] env[63379]: DEBUG oslo_concurrency.lockutils [None 
req-3308a90b-85a3-4fdd-b00e-018db6d7ac5f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Lock "aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1682.451060] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3308a90b-85a3-4fdd-b00e-018db6d7ac5f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Lock "aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1682.455109] env[63379]: INFO nova.compute.manager [None req-3308a90b-85a3-4fdd-b00e-018db6d7ac5f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Terminating instance [ 1682.455109] env[63379]: DEBUG nova.compute.manager [None req-3308a90b-85a3-4fdd-b00e-018db6d7ac5f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1682.455109] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-3308a90b-85a3-4fdd-b00e-018db6d7ac5f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1682.455216] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05da5251-8c8a-4b41-bd0f-b978b2c66e97 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.463397] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-3308a90b-85a3-4fdd-b00e-018db6d7ac5f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1682.463889] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-baf0d825-00c9-4006-8ea7-5fe9cc2c7bc7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.471489] env[63379]: DEBUG oslo_vmware.api [None req-3308a90b-85a3-4fdd-b00e-018db6d7ac5f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Waiting for the task: (returnval){ [ 1682.471489] env[63379]: value = "task-1779775" [ 1682.471489] env[63379]: _type = "Task" [ 1682.471489] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1682.480606] env[63379]: DEBUG oslo_vmware.api [None req-3308a90b-85a3-4fdd-b00e-018db6d7ac5f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779775, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.596275] env[63379]: DEBUG oslo_vmware.api [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779774, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.610832] env[63379]: DEBUG oslo_vmware.api [None req-3ef7cc05-7eb0-401b-8b5c-16594cf9cc06 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Task: {'id': task-1779772, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.696169] env[63379]: DEBUG oslo_vmware.api [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779773, 'name': Rename_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.842075] env[63379]: DEBUG oslo_concurrency.lockutils [None req-564d0e68-17df-4c5b-ab32-1283a9919c5d tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Lock "acc8aa2f-41a8-4f06-8227-a1bae9c93f44" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.336s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1682.853584] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8ef4fd12-bb92-4260-b551-82621a44da83 tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Lock "36681a38-7cfd-44cf-8b8f-1f4dfb613f4f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.568s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1682.981618] env[63379]: DEBUG oslo_vmware.api [None req-3308a90b-85a3-4fdd-b00e-018db6d7ac5f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779775, 'name': PowerOffVM_Task, 'duration_secs': 0.291796} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1682.981788] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-3308a90b-85a3-4fdd-b00e-018db6d7ac5f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1682.981893] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-3308a90b-85a3-4fdd-b00e-018db6d7ac5f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1682.982170] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2cb640d9-8bc6-44dd-a0e7-ffb9b5b96a33 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.098940] env[63379]: DEBUG oslo_vmware.api [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779774, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.113677] env[63379]: DEBUG oslo_vmware.api [None req-3ef7cc05-7eb0-401b-8b5c-16594cf9cc06 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Task: {'id': task-1779772, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.182044] env[63379]: DEBUG nova.network.neutron [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Updating instance_info_cache with network_info: [{"id": "7a48c3ef-9850-43b6-b138-d7cbb329face", "address": "fa:16:3e:d8:c3:c8", "network": {"id": "832e4609-8371-4d4b-8cfc-8a38039d24b7", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1517956996-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "c3562bb229474ba7aa3dae98def05260", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f267bcdd-0daa-4337-9709-5fc060c267d8", "external-id": "nsx-vlan-transportzone-308", "segmentation_id": 308, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a48c3ef-98", "ovs_interfaceid": "7a48c3ef-9850-43b6-b138-d7cbb329face", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1683.197177] env[63379]: DEBUG oslo_vmware.api [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 
tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779773, 'name': Rename_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.287026] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-3308a90b-85a3-4fdd-b00e-018db6d7ac5f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1683.287026] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-3308a90b-85a3-4fdd-b00e-018db6d7ac5f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1683.287026] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-3308a90b-85a3-4fdd-b00e-018db6d7ac5f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Deleting the datastore file [datastore1] aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1683.287026] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8d00ad60-f7a6-4fb5-a942-3bfca925c31f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.292018] env[63379]: DEBUG oslo_vmware.api [None req-3308a90b-85a3-4fdd-b00e-018db6d7ac5f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Waiting for the task: (returnval){ [ 1683.292018] env[63379]: value = "task-1779778" [ 1683.292018] env[63379]: _type = "Task" [ 1683.292018] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1683.300256] env[63379]: DEBUG oslo_vmware.api [None req-3308a90b-85a3-4fdd-b00e-018db6d7ac5f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779778, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.311397] env[63379]: DEBUG nova.compute.manager [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1683.345836] env[63379]: DEBUG nova.virt.hardware [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1683.346360] env[63379]: DEBUG nova.virt.hardware [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1683.346737] env[63379]: DEBUG nova.virt.hardware [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1683.347123] env[63379]: DEBUG nova.virt.hardware [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1683.350023] env[63379]: DEBUG nova.virt.hardware [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1683.350023] env[63379]: DEBUG nova.virt.hardware [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1683.350023] env[63379]: DEBUG nova.virt.hardware [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1683.350023] env[63379]: DEBUG nova.virt.hardware [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1683.350023] env[63379]: DEBUG nova.virt.hardware [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Got 1 
possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1683.350023] env[63379]: DEBUG nova.virt.hardware [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1683.350023] env[63379]: DEBUG nova.virt.hardware [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1683.350023] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07666a8c-6ddf-476d-b6f6-e925c00e11a7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.363622] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-120a278e-b241-4c33-bd2d-752e711a424a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.419020] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6a4ae491-cf15-42f2-b882-7377288a8f45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquiring lock "ec1f7a44-7344-43fb-9d51-688731d8ce14" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1683.419020] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6a4ae491-cf15-42f2-b882-7377288a8f45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Lock "ec1f7a44-7344-43fb-9d51-688731d8ce14" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1683.419164] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6a4ae491-cf15-42f2-b882-7377288a8f45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquiring lock "ec1f7a44-7344-43fb-9d51-688731d8ce14-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1683.419349] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6a4ae491-cf15-42f2-b882-7377288a8f45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Lock "ec1f7a44-7344-43fb-9d51-688731d8ce14-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1683.419594] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6a4ae491-cf15-42f2-b882-7377288a8f45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Lock "ec1f7a44-7344-43fb-9d51-688731d8ce14-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1683.422847] env[63379]: INFO nova.compute.manager [None req-6a4ae491-cf15-42f2-b882-7377288a8f45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Terminating instance [ 1683.427252] env[63379]: DEBUG nova.compute.manager [None req-6a4ae491-cf15-42f2-b882-7377288a8f45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1683.429019] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6a4ae491-cf15-42f2-b882-7377288a8f45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1683.429727] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8ab46f5-0cd3-49f2-9e50-3a7892815749 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.438578] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a4ae491-cf15-42f2-b882-7377288a8f45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1683.439533] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-93614b9b-8cd6-42b7-863c-29aefb4770a2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.450823] env[63379]: DEBUG oslo_vmware.api [None req-6a4ae491-cf15-42f2-b882-7377288a8f45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1683.450823] env[63379]: value = "task-1779779" [ 1683.450823] env[63379]: _type = "Task" [ 1683.450823] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1683.463315] env[63379]: DEBUG oslo_vmware.api [None req-6a4ae491-cf15-42f2-b882-7377288a8f45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779779, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.619278] env[63379]: DEBUG oslo_vmware.api [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779774, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.636718] env[63379]: DEBUG oslo_vmware.api [None req-3ef7cc05-7eb0-401b-8b5c-16594cf9cc06 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Task: {'id': task-1779772, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.684992] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Releasing lock "refresh_cache-da66c3d9-ca03-4113-8703-64b666628936" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1683.698998] env[63379]: DEBUG oslo_vmware.api [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779773, 'name': Rename_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.764961] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-200de621-0e01-4bdf-9d0d-49d07eb82e59 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.778239] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a64700d5-1aa0-4b83-881e-bf8124e3429c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.824480] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee2f1ef0-bb6e-451d-9ba2-0e525cd4b003 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.832982] env[63379]: DEBUG oslo_vmware.api [None req-3308a90b-85a3-4fdd-b00e-018db6d7ac5f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Task: {'id': task-1779778, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.529689} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1683.835508] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-3308a90b-85a3-4fdd-b00e-018db6d7ac5f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1683.835814] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-3308a90b-85a3-4fdd-b00e-018db6d7ac5f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1683.836091] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-3308a90b-85a3-4fdd-b00e-018db6d7ac5f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1683.836326] env[63379]: INFO nova.compute.manager [None req-3308a90b-85a3-4fdd-b00e-018db6d7ac5f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Took 1.38 seconds to destroy the instance on the hypervisor. 
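The entries above and below repeatedly show the oslo.vmware task-polling pattern used by the vmwareapi driver: a vSphere *_Task method (PowerOffVM_Task, ReconfigVM_Task, DeleteDatastoreFile_Task, Rename_Task) is invoked, the returned task handle appears in a "Waiting for the task: (returnval){ ... }" block, and wait_for_task/_poll_task then log "progress is N%." until the task reports "completed successfully." The sketch below is only a simplified illustration of that loop under those assumptions, not oslo.vmware's actual implementation; the fetch_task_info callable, the state names, and the poll_interval default are hypothetical stand-ins.

import time

# Hypothetical stand-ins for the task states implied by the log output
# ("progress is N%." while running, "completed successfully." at the end).
RUNNING_STATES = ("queued", "running")
SUCCESS_STATE = "success"


def wait_for_task(fetch_task_info, task_ref, poll_interval=0.5):
    """Poll a vSphere-style task until it reaches a terminal state.

    fetch_task_info is a caller-supplied callable (hypothetical here) that
    returns an object with .state, .progress and .error attributes for the
    given task reference, e.g. the task-1779775 handle seen in the log.
    """
    while True:
        info = fetch_task_info(task_ref)
        if info.state in RUNNING_STATES:
            # Corresponds to the "Task: {'id': ..., 'name': ...} progress is N%." lines.
            print(f"Task {task_ref} progress is {info.progress or 0}%.")
            time.sleep(poll_interval)
            continue
        if info.state == SUCCESS_STATE:
            # Corresponds to the "... completed successfully." lines.
            print(f"Task {task_ref} completed successfully.")
            return info
        # Any other terminal state is treated as a failure.
        raise RuntimeError(f"Task {task_ref} failed: {info.error}")

A caller would pass the task id taken from the log (for example task-1779775) together with a callable that queries that task's current state; the real driver keeps this polling behind its API session object rather than exposing it to compute-manager code.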
[ 1683.837126] env[63379]: DEBUG oslo.service.loopingcall [None req-3308a90b-85a3-4fdd-b00e-018db6d7ac5f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1683.837126] env[63379]: DEBUG nova.compute.manager [-] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1683.837255] env[63379]: DEBUG nova.network.neutron [-] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1683.840818] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f977b327-a3a9-4574-b7bb-906be833d39c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.858526] env[63379]: DEBUG nova.compute.provider_tree [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1683.885877] env[63379]: DEBUG nova.compute.manager [req-d057cc50-8d08-4757-97d3-5f47c7d0b3b1 req-dca0f6fa-7d7b-46c0-a6c1-2bfd47a1d1dc service nova] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Received event network-vif-plugged-13196237-c6ec-4167-b9f2-5818ee2ad126 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1683.886172] env[63379]: DEBUG oslo_concurrency.lockutils [req-d057cc50-8d08-4757-97d3-5f47c7d0b3b1 req-dca0f6fa-7d7b-46c0-a6c1-2bfd47a1d1dc service nova] Acquiring lock "8b33e64a-ea19-4974-8c2d-350615b1e061-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1683.886431] env[63379]: DEBUG oslo_concurrency.lockutils [req-d057cc50-8d08-4757-97d3-5f47c7d0b3b1 req-dca0f6fa-7d7b-46c0-a6c1-2bfd47a1d1dc service nova] Lock "8b33e64a-ea19-4974-8c2d-350615b1e061-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1683.886635] env[63379]: DEBUG oslo_concurrency.lockutils [req-d057cc50-8d08-4757-97d3-5f47c7d0b3b1 req-dca0f6fa-7d7b-46c0-a6c1-2bfd47a1d1dc service nova] Lock "8b33e64a-ea19-4974-8c2d-350615b1e061-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1683.886892] env[63379]: DEBUG nova.compute.manager [req-d057cc50-8d08-4757-97d3-5f47c7d0b3b1 req-dca0f6fa-7d7b-46c0-a6c1-2bfd47a1d1dc service nova] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] No waiting events found dispatching network-vif-plugged-13196237-c6ec-4167-b9f2-5818ee2ad126 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1683.890020] env[63379]: WARNING nova.compute.manager 
[req-d057cc50-8d08-4757-97d3-5f47c7d0b3b1 req-dca0f6fa-7d7b-46c0-a6c1-2bfd47a1d1dc service nova] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Received unexpected event network-vif-plugged-13196237-c6ec-4167-b9f2-5818ee2ad126 for instance with vm_state building and task_state spawning. [ 1683.963254] env[63379]: DEBUG oslo_vmware.api [None req-6a4ae491-cf15-42f2-b882-7377288a8f45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779779, 'name': PowerOffVM_Task, 'duration_secs': 0.22799} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1683.963594] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a4ae491-cf15-42f2-b882-7377288a8f45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1683.963895] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6a4ae491-cf15-42f2-b882-7377288a8f45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1683.964276] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-30852948-4b43-4776-a042-cc2abfbfb261 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.037334] env[63379]: DEBUG nova.network.neutron [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Successfully updated port: 13196237-c6ec-4167-b9f2-5818ee2ad126 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1684.059876] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6a4ae491-cf15-42f2-b882-7377288a8f45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1684.059876] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6a4ae491-cf15-42f2-b882-7377288a8f45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1684.059876] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a4ae491-cf15-42f2-b882-7377288a8f45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Deleting the datastore file [datastore1] ec1f7a44-7344-43fb-9d51-688731d8ce14 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1684.059876] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3458077c-20ee-4c29-9f55-6c14d20799fe {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.069148] env[63379]: DEBUG oslo_vmware.api [None req-6a4ae491-cf15-42f2-b882-7377288a8f45 tempest-ServersAdminTestJSON-360986763 
tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1684.069148] env[63379]: value = "task-1779781" [ 1684.069148] env[63379]: _type = "Task" [ 1684.069148] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1684.076823] env[63379]: DEBUG oslo_vmware.api [None req-6a4ae491-cf15-42f2-b882-7377288a8f45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779781, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.100514] env[63379]: DEBUG oslo_vmware.api [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779774, 'name': ReconfigVM_Task, 'duration_secs': 1.881534} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1684.100514] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Reconfigured VM instance instance-00000046 to attach disk [datastore1] c1858f41-75e7-4eee-a6db-493e150622ef/c1858f41-75e7-4eee-a6db-493e150622ef.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1684.100514] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3b9d713a-df7a-44dc-a906-ea94f1b168b0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.109238] env[63379]: DEBUG oslo_vmware.api [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1684.109238] env[63379]: value = "task-1779782" [ 1684.109238] env[63379]: _type = "Task" [ 1684.109238] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1684.114875] env[63379]: DEBUG oslo_vmware.api [None req-3ef7cc05-7eb0-401b-8b5c-16594cf9cc06 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Task: {'id': task-1779772, 'name': DeleteDatastoreFile_Task, 'duration_secs': 2.146872} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1684.115491] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ef7cc05-7eb0-401b-8b5c-16594cf9cc06 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1684.115690] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-3ef7cc05-7eb0-401b-8b5c-16594cf9cc06 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] [instance: 266cc3d5-c10d-4367-a879-d170802495db] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1684.115899] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-3ef7cc05-7eb0-401b-8b5c-16594cf9cc06 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] [instance: 266cc3d5-c10d-4367-a879-d170802495db] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1684.116105] env[63379]: INFO nova.compute.manager [None req-3ef7cc05-7eb0-401b-8b5c-16594cf9cc06 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] [instance: 266cc3d5-c10d-4367-a879-d170802495db] Took 3.17 seconds to destroy the instance on the hypervisor. [ 1684.116366] env[63379]: DEBUG oslo.service.loopingcall [None req-3ef7cc05-7eb0-401b-8b5c-16594cf9cc06 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1684.116884] env[63379]: DEBUG nova.compute.manager [-] [instance: 266cc3d5-c10d-4367-a879-d170802495db] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1684.117008] env[63379]: DEBUG nova.network.neutron [-] [instance: 266cc3d5-c10d-4367-a879-d170802495db] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1684.121492] env[63379]: DEBUG oslo_vmware.api [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779782, 'name': Rename_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.197675] env[63379]: DEBUG oslo_vmware.api [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779773, 'name': Rename_Task, 'duration_secs': 2.055365} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1684.197975] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1684.198256] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6f2398c0-910f-45c7-b9c1-193bc1793faa {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.205379] env[63379]: DEBUG oslo_vmware.api [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Waiting for the task: (returnval){ [ 1684.205379] env[63379]: value = "task-1779783" [ 1684.205379] env[63379]: _type = "Task" [ 1684.205379] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1684.214340] env[63379]: DEBUG oslo_vmware.api [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779783, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.221435] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1684.221711] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ab21defe-a6f3-4958-a532-b4e4de9ba588 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.228639] env[63379]: DEBUG oslo_vmware.api [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Waiting for the task: (returnval){ [ 1684.228639] env[63379]: value = "task-1779784" [ 1684.228639] env[63379]: _type = "Task" [ 1684.228639] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1684.240202] env[63379]: DEBUG oslo_vmware.api [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779784, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.360445] env[63379]: DEBUG nova.compute.manager [req-e86888f2-a2eb-4678-8497-8f942a465017 req-f91b103d-db77-40f5-af11-1ba1abc59b9a service nova] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Received event network-vif-deleted-e034314c-72fb-4187-9c6b-1cd2e95aa97a {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1684.360674] env[63379]: INFO nova.compute.manager [req-e86888f2-a2eb-4678-8497-8f942a465017 req-f91b103d-db77-40f5-af11-1ba1abc59b9a service nova] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Neutron deleted interface e034314c-72fb-4187-9c6b-1cd2e95aa97a; detaching it from the instance and deleting it from the info cache [ 1684.360854] env[63379]: DEBUG nova.network.neutron [req-e86888f2-a2eb-4678-8497-8f942a465017 req-f91b103d-db77-40f5-af11-1ba1abc59b9a service nova] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1684.362728] env[63379]: DEBUG nova.scheduler.client.report [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1684.541020] env[63379]: DEBUG oslo_concurrency.lockutils [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquiring lock "refresh_cache-8b33e64a-ea19-4974-8c2d-350615b1e061" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1684.541020] env[63379]: DEBUG oslo_concurrency.lockutils [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquired lock "refresh_cache-8b33e64a-ea19-4974-8c2d-350615b1e061" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1684.541020] env[63379]: DEBUG nova.network.neutron [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1684.583928] env[63379]: DEBUG oslo_vmware.api [None req-6a4ae491-cf15-42f2-b882-7377288a8f45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779781, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.204671} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1684.583928] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a4ae491-cf15-42f2-b882-7377288a8f45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1684.584142] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6a4ae491-cf15-42f2-b882-7377288a8f45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1684.584360] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6a4ae491-cf15-42f2-b882-7377288a8f45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1684.585033] env[63379]: INFO nova.compute.manager [None req-6a4ae491-cf15-42f2-b882-7377288a8f45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1684.585033] env[63379]: DEBUG oslo.service.loopingcall [None req-6a4ae491-cf15-42f2-b882-7377288a8f45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1684.585033] env[63379]: DEBUG nova.compute.manager [-] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1684.585191] env[63379]: DEBUG nova.network.neutron [-] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1684.628585] env[63379]: DEBUG oslo_vmware.api [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779782, 'name': Rename_Task, 'duration_secs': 0.17426} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1684.628884] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1684.629160] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-65813307-0eef-4995-ae62-29ca7f937fe4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.636797] env[63379]: DEBUG oslo_vmware.api [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1684.636797] env[63379]: value = "task-1779785" [ 1684.636797] env[63379]: _type = "Task" [ 1684.636797] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1684.645922] env[63379]: DEBUG oslo_vmware.api [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779785, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.717024] env[63379]: DEBUG oslo_vmware.api [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779783, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.729658] env[63379]: DEBUG nova.network.neutron [-] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1684.744223] env[63379]: DEBUG oslo_vmware.api [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779784, 'name': PowerOffVM_Task, 'duration_secs': 0.205782} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1684.745353] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1684.746356] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-305d405d-8141-4fb2-b48d-1ff2cb9547c5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.769300] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51aad618-b539-4756-a8fb-f53451f6df33 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.807121] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1684.807121] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-badb5bd1-64c9-45e1-88a8-bf6c8d43a96e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.818588] env[63379]: DEBUG oslo_vmware.api [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Waiting for the task: (returnval){ [ 1684.818588] env[63379]: value = "task-1779786" [ 1684.818588] env[63379]: _type = "Task" [ 1684.818588] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1684.827023] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] VM already powered off {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1684.827395] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1684.827701] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1684.827969] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1684.829020] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1684.829020] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-38f9e339-c518-4992-8f0b-ed5c67532ed0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.841075] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1684.842172] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1684.842172] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1901bb6b-d948-45e3-a1fd-f70204026932 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.848121] env[63379]: DEBUG oslo_vmware.api [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Waiting for the task: (returnval){ [ 1684.848121] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52cafb2a-8f0e-74ff-ad92-4d734683a02d" [ 1684.848121] env[63379]: _type = "Task" [ 1684.848121] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1684.857305] env[63379]: DEBUG oslo_vmware.api [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52cafb2a-8f0e-74ff-ad92-4d734683a02d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.873046] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.563s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1684.873046] env[63379]: DEBUG nova.compute.manager [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1684.878088] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bac54475-d474-4178-a565-46ccb0a2a5bd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.882539] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.607s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1684.884131] env[63379]: INFO nova.compute.claims [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1684.887579] env[63379]: DEBUG nova.network.neutron [-] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1684.897545] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11bac095-f2cd-43a9-b6a1-1e5cc4030e7e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.910262] env[63379]: DEBUG nova.network.neutron [-] [instance: 266cc3d5-c10d-4367-a879-d170802495db] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1684.936812] env[63379]: DEBUG nova.compute.manager [req-e86888f2-a2eb-4678-8497-8f942a465017 req-f91b103d-db77-40f5-af11-1ba1abc59b9a service nova] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Detach interface failed, port_id=e034314c-72fb-4187-9c6b-1cd2e95aa97a, reason: Instance aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 1685.090315] env[63379]: DEBUG nova.network.neutron [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1685.147983] env[63379]: DEBUG oslo_vmware.api [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779785, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.220368] env[63379]: DEBUG oslo_vmware.api [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779783, 'name': PowerOnVM_Task, 'duration_secs': 0.525921} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1685.220688] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1685.220915] env[63379]: INFO nova.compute.manager [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Took 11.60 seconds to spawn the instance on the hypervisor. [ 1685.221101] env[63379]: DEBUG nova.compute.manager [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1685.224020] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15dc8cd6-4868-442d-9bea-b7e257e4e99f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.237049] env[63379]: INFO nova.compute.manager [-] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Took 1.40 seconds to deallocate network for instance. [ 1685.265585] env[63379]: DEBUG nova.network.neutron [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Updating instance_info_cache with network_info: [{"id": "13196237-c6ec-4167-b9f2-5818ee2ad126", "address": "fa:16:3e:22:1e:bf", "network": {"id": "0f1c71c4-9a40-4d5f-9ce7-b2e38109b1f5", "bridge": "br-int", "label": "tempest-ImagesTestJSON-969152574-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "767980ba969142098ccbdf031f6e62a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0636c3f6-fcb7-4954-ab07-c5cd0dee37b0", "external-id": "nsx-vlan-transportzone-857", "segmentation_id": 857, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13196237-c6", "ovs_interfaceid": "13196237-c6ec-4167-b9f2-5818ee2ad126", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1685.360650] env[63379]: DEBUG oslo_vmware.api [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52cafb2a-8f0e-74ff-ad92-4d734683a02d, 'name': SearchDatastore_Task, 'duration_secs': 0.024116} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1685.361480] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a18c706-7a1b-42da-a938-e50a13cbfb43 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.367410] env[63379]: DEBUG oslo_vmware.api [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Waiting for the task: (returnval){ [ 1685.367410] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5208311e-f1f9-b85f-e1b4-c33b98a0597f" [ 1685.367410] env[63379]: _type = "Task" [ 1685.367410] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1685.376485] env[63379]: DEBUG oslo_vmware.api [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5208311e-f1f9-b85f-e1b4-c33b98a0597f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.390381] env[63379]: DEBUG nova.compute.utils [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1685.395016] env[63379]: INFO nova.compute.manager [-] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Took 0.81 seconds to deallocate network for instance. [ 1685.396107] env[63379]: DEBUG nova.compute.manager [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1685.396107] env[63379]: DEBUG nova.network.neutron [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1685.413756] env[63379]: INFO nova.compute.manager [-] [instance: 266cc3d5-c10d-4367-a879-d170802495db] Took 1.30 seconds to deallocate network for instance. 
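The repeated "Waiting for the task ... progress is N% ... completed successfully" entries above come from oslo_vmware's wait_for_task, which polls the vCenter Task object at a fixed interval (the same oslo_service loopingcall helper that the "Waiting for function ... to return" entries refer to). A minimal sketch of that poll-until-done shape, assuming only the public oslo_service API; fetch_task_progress is a hypothetical stand-in for the real PropertyCollector round-trip against the Task managed object.

# Sketch of the polling behind the "progress is N%" DEBUG lines (not Nova's code).
# fetch_task_progress() is hypothetical: it should return (state, progress, result).
from oslo_service import loopingcall


def wait_for_task_sketch(fetch_task_progress, poll_interval=0.5):
    def _poll():
        state, progress, result = fetch_task_progress()
        # oslo_vmware logs the progress percentage on each poll, as seen above.
        if state == "success":
            raise loopingcall.LoopingCallDone(result)  # ends the loop, wait() returns result
        if state == "error":
            raise RuntimeError("task failed")          # a failed task raises out of the loop

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    return timer.start(interval=poll_interval).wait()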
[ 1685.451347] env[63379]: DEBUG nova.policy [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'deef4f9ae0754a6c8a7f673c10a76408', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8c01c5c8c3734c4ea066324e542e7374', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1685.654408] env[63379]: DEBUG oslo_vmware.api [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779785, 'name': PowerOnVM_Task, 'duration_secs': 0.884662} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1685.654901] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1685.657187] env[63379]: DEBUG nova.compute.manager [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1685.657187] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9a02ca0-2c7d-4c00-89a0-d6962f9956fe {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.744747] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3308a90b-85a3-4fdd-b00e-018db6d7ac5f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1685.744892] env[63379]: INFO nova.compute.manager [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Took 44.23 seconds to build instance. 
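The "Acquiring lock ... by ...", "acquired ... waited Ns" and "released ... held Ns" lines throughout this section are emitted by oslo_concurrency's lockutils wrapper (lockutils.py:402/407/421); the resource tracker serializes claims and usage updates on the "compute_resources" lock this way. A minimal sketch of the two usual in-process forms, with placeholder bodies standing in for the resource-tracker work.

# Minimal sketch of the oslo_concurrency locking that produces the
# "Acquiring lock ... / acquired ... waited / released ... held" DEBUG lines.
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def instance_claim_sketch(instance_uuid):
    # Placeholder for the work done while the lock is held.
    return "claimed %s" % instance_uuid


def update_usage_sketch(instance_uuid):
    # Equivalent context-manager form; the shared lock name serializes both callers.
    with lockutils.lock('compute_resources'):
        return "updated usage for %s" % instance_uuid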
[ 1685.770190] env[63379]: DEBUG oslo_concurrency.lockutils [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Releasing lock "refresh_cache-8b33e64a-ea19-4974-8c2d-350615b1e061" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1685.770190] env[63379]: DEBUG nova.compute.manager [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Instance network_info: |[{"id": "13196237-c6ec-4167-b9f2-5818ee2ad126", "address": "fa:16:3e:22:1e:bf", "network": {"id": "0f1c71c4-9a40-4d5f-9ce7-b2e38109b1f5", "bridge": "br-int", "label": "tempest-ImagesTestJSON-969152574-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "767980ba969142098ccbdf031f6e62a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0636c3f6-fcb7-4954-ab07-c5cd0dee37b0", "external-id": "nsx-vlan-transportzone-857", "segmentation_id": 857, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13196237-c6", "ovs_interfaceid": "13196237-c6ec-4167-b9f2-5818ee2ad126", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1685.770190] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:22:1e:bf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0636c3f6-fcb7-4954-ab07-c5cd0dee37b0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '13196237-c6ec-4167-b9f2-5818ee2ad126', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1685.778501] env[63379]: DEBUG oslo.service.loopingcall [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1685.779845] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1685.779845] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-40508f61-3436-41d9-915d-b526079ce89f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.800468] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1685.800468] env[63379]: value = "task-1779788" [ 1685.800468] env[63379]: _type = "Task" [ 1685.800468] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1685.808615] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779788, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.811861] env[63379]: DEBUG nova.network.neutron [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Successfully created port: bf9adade-286a-4e50-a0a5-a80cd17209c6 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1685.878486] env[63379]: DEBUG oslo_vmware.api [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5208311e-f1f9-b85f-e1b4-c33b98a0597f, 'name': SearchDatastore_Task, 'duration_secs': 0.018367} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1685.881020] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1685.881020] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] da66c3d9-ca03-4113-8703-64b666628936/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48-rescue.vmdk. 
{{(pid=63379) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1685.881020] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1bc6732c-bf9d-4ca8-98fc-bde86fec2f55 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.886558] env[63379]: DEBUG oslo_vmware.api [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Waiting for the task: (returnval){ [ 1685.886558] env[63379]: value = "task-1779789" [ 1685.886558] env[63379]: _type = "Task" [ 1685.886558] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1685.896562] env[63379]: DEBUG oslo_vmware.api [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779789, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.898280] env[63379]: DEBUG nova.compute.manager [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1685.905752] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6a4ae491-cf15-42f2-b882-7377288a8f45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1685.920136] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3ef7cc05-7eb0-401b-8b5c-16594cf9cc06 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1685.960068] env[63379]: DEBUG nova.compute.manager [req-2abdb104-521a-47da-8b38-db2556b56285 req-29f298cd-dc2a-45c4-b6be-1f25efe4a5d6 service nova] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Received event network-changed-13196237-c6ec-4167-b9f2-5818ee2ad126 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1685.960301] env[63379]: DEBUG nova.compute.manager [req-2abdb104-521a-47da-8b38-db2556b56285 req-29f298cd-dc2a-45c4-b6be-1f25efe4a5d6 service nova] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Refreshing instance network info cache due to event network-changed-13196237-c6ec-4167-b9f2-5818ee2ad126. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1685.960645] env[63379]: DEBUG oslo_concurrency.lockutils [req-2abdb104-521a-47da-8b38-db2556b56285 req-29f298cd-dc2a-45c4-b6be-1f25efe4a5d6 service nova] Acquiring lock "refresh_cache-8b33e64a-ea19-4974-8c2d-350615b1e061" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1685.960645] env[63379]: DEBUG oslo_concurrency.lockutils [req-2abdb104-521a-47da-8b38-db2556b56285 req-29f298cd-dc2a-45c4-b6be-1f25efe4a5d6 service nova] Acquired lock "refresh_cache-8b33e64a-ea19-4974-8c2d-350615b1e061" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1685.960805] env[63379]: DEBUG nova.network.neutron [req-2abdb104-521a-47da-8b38-db2556b56285 req-29f298cd-dc2a-45c4-b6be-1f25efe4a5d6 service nova] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Refreshing network info cache for port 13196237-c6ec-4167-b9f2-5818ee2ad126 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1686.181931] env[63379]: DEBUG oslo_concurrency.lockutils [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1686.246929] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f67a3183-9ab9-4f59-b163-5f20247b19d5 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Lock "7687aaa1-d1a0-4d0d-a6b4-47c454fe3655" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.743s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1686.263540] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e5b5e8b-13f7-45b9-8219-119802b37f6d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.277815] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f58942cd-23f1-4076-8567-a4b78b436af7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.320771] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc544f45-ed27-46e5-8af3-b34bbb5ed170 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.333635] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c7e6a89-6a2a-4ba6-ae0d-8f8963a304a6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.337621] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779788, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.347726] env[63379]: DEBUG nova.compute.provider_tree [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1686.367452] env[63379]: DEBUG oslo_concurrency.lockutils [None req-df8295bb-d2ff-49ec-b548-1f0e8c489bce tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Acquiring lock "41952d7b-ce23-4e9b-8843-bbac1d3099c1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1686.367783] env[63379]: DEBUG oslo_concurrency.lockutils [None req-df8295bb-d2ff-49ec-b548-1f0e8c489bce tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Lock "41952d7b-ce23-4e9b-8843-bbac1d3099c1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1686.368017] env[63379]: DEBUG oslo_concurrency.lockutils [None req-df8295bb-d2ff-49ec-b548-1f0e8c489bce tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Acquiring lock "41952d7b-ce23-4e9b-8843-bbac1d3099c1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1686.368231] env[63379]: DEBUG oslo_concurrency.lockutils [None req-df8295bb-d2ff-49ec-b548-1f0e8c489bce tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Lock "41952d7b-ce23-4e9b-8843-bbac1d3099c1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1686.368479] env[63379]: DEBUG oslo_concurrency.lockutils [None req-df8295bb-d2ff-49ec-b548-1f0e8c489bce tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Lock "41952d7b-ce23-4e9b-8843-bbac1d3099c1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1686.371154] env[63379]: INFO nova.compute.manager [None req-df8295bb-d2ff-49ec-b548-1f0e8c489bce tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Terminating instance [ 1686.373612] env[63379]: DEBUG nova.compute.manager [None req-df8295bb-d2ff-49ec-b548-1f0e8c489bce tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1686.373873] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-df8295bb-d2ff-49ec-b548-1f0e8c489bce tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1686.374733] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a849318-e4d4-4fd3-ba2a-63db4ed5eb0a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.385522] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-df8295bb-d2ff-49ec-b548-1f0e8c489bce tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1686.385939] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9890ee1a-6e98-49ce-bac1-eb28ee6cefdd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.397513] env[63379]: DEBUG oslo_vmware.api [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779789, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.398942] env[63379]: DEBUG oslo_vmware.api [None req-df8295bb-d2ff-49ec-b548-1f0e8c489bce tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Waiting for the task: (returnval){ [ 1686.398942] env[63379]: value = "task-1779790" [ 1686.398942] env[63379]: _type = "Task" [ 1686.398942] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1686.411687] env[63379]: DEBUG oslo_vmware.api [None req-df8295bb-d2ff-49ec-b548-1f0e8c489bce tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': task-1779790, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.799579] env[63379]: DEBUG nova.network.neutron [req-2abdb104-521a-47da-8b38-db2556b56285 req-29f298cd-dc2a-45c4-b6be-1f25efe4a5d6 service nova] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Updated VIF entry in instance network info cache for port 13196237-c6ec-4167-b9f2-5818ee2ad126. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1686.800176] env[63379]: DEBUG nova.network.neutron [req-2abdb104-521a-47da-8b38-db2556b56285 req-29f298cd-dc2a-45c4-b6be-1f25efe4a5d6 service nova] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Updating instance_info_cache with network_info: [{"id": "13196237-c6ec-4167-b9f2-5818ee2ad126", "address": "fa:16:3e:22:1e:bf", "network": {"id": "0f1c71c4-9a40-4d5f-9ce7-b2e38109b1f5", "bridge": "br-int", "label": "tempest-ImagesTestJSON-969152574-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "767980ba969142098ccbdf031f6e62a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0636c3f6-fcb7-4954-ab07-c5cd0dee37b0", "external-id": "nsx-vlan-transportzone-857", "segmentation_id": 857, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13196237-c6", "ovs_interfaceid": "13196237-c6ec-4167-b9f2-5818ee2ad126", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1686.825557] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779788, 'name': CreateVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.854520] env[63379]: DEBUG nova.scheduler.client.report [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1686.901783] env[63379]: DEBUG oslo_vmware.api [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779789, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.519749} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1686.905605] env[63379]: INFO nova.virt.vmwareapi.ds_util [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] da66c3d9-ca03-4113-8703-64b666628936/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48-rescue.vmdk. 
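The inventory blob logged for provider cf478c89-515f-4372-b90f-4868ab56e978 fixes the effective capacity the scheduler works against. Assuming placement's usual formula, capacity = (total - reserved) * allocation_ratio per resource class; a quick worked check against the logged numbers (an illustration, not Nova code):

# Effective capacity implied by the logged inventory, assuming the usual
# placement formula (total - reserved) * allocation_ratio.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0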
[ 1686.906784] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e13fb139-54eb-4fad-a07d-8b208e402ea8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.912867] env[63379]: DEBUG nova.compute.manager [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1686.940952] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Reconfiguring VM instance instance-00000049 to attach disk [datastore1] da66c3d9-ca03-4113-8703-64b666628936/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48-rescue.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1686.942298] env[63379]: DEBUG oslo_vmware.api [None req-df8295bb-d2ff-49ec-b548-1f0e8c489bce tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': task-1779790, 'name': PowerOffVM_Task, 'duration_secs': 0.223249} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1686.942298] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-542d9ee2-553f-4c39-b2a0-2fd725f4bc5a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.959263] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-df8295bb-d2ff-49ec-b548-1f0e8c489bce tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1686.959526] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-df8295bb-d2ff-49ec-b548-1f0e8c489bce tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1686.962166] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aa4fe1c3-da36-4487-993a-4871646fa0f0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.969730] env[63379]: DEBUG oslo_vmware.api [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Waiting for the task: (returnval){ [ 1686.969730] env[63379]: value = "task-1779792" [ 1686.969730] env[63379]: _type = "Task" [ 1686.969730] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1686.976744] env[63379]: DEBUG nova.virt.hardware [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1686.976993] env[63379]: DEBUG nova.virt.hardware [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1686.977168] env[63379]: DEBUG nova.virt.hardware [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1686.977396] env[63379]: DEBUG nova.virt.hardware [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1686.977545] env[63379]: DEBUG nova.virt.hardware [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1686.977700] env[63379]: DEBUG nova.virt.hardware [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1686.977915] env[63379]: DEBUG nova.virt.hardware [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1686.978308] env[63379]: DEBUG nova.virt.hardware [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1686.978695] env[63379]: DEBUG 
nova.virt.hardware [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1686.978750] env[63379]: DEBUG nova.virt.hardware [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1686.979203] env[63379]: DEBUG nova.virt.hardware [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1686.979736] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f56022c4-bf58-4659-a9fe-295b20b032e8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.986622] env[63379]: DEBUG oslo_vmware.api [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779792, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.992294] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-943d0a59-fd4c-4bb2-909b-c50c94c86646 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.091428] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-df8295bb-d2ff-49ec-b548-1f0e8c489bce tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1687.091670] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-df8295bb-d2ff-49ec-b548-1f0e8c489bce tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1687.091858] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-df8295bb-d2ff-49ec-b548-1f0e8c489bce tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Deleting the datastore file [datastore1] 41952d7b-ce23-4e9b-8843-bbac1d3099c1 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1687.092144] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-84c181dd-fde2-4ea1-b81f-58b9c68ae80a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.098423] env[63379]: DEBUG oslo_vmware.api [None req-df8295bb-d2ff-49ec-b548-1f0e8c489bce tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Waiting for the task: (returnval){ [ 1687.098423] env[63379]: 
value = "task-1779793" [ 1687.098423] env[63379]: _type = "Task" [ 1687.098423] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1687.106778] env[63379]: DEBUG oslo_vmware.api [None req-df8295bb-d2ff-49ec-b548-1f0e8c489bce tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': task-1779793, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.299908] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-9583064e-1fcd-40f7-9e14-4102db43a548 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Volume attach. Driver type: vmdk {{(pid=63379) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1687.299908] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-9583064e-1fcd-40f7-9e14-4102db43a548 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369424', 'volume_id': '136f5b73-3e46-4a97-a860-2727b3e8d24e', 'name': 'volume-136f5b73-3e46-4a97-a860-2727b3e8d24e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f', 'attached_at': '', 'detached_at': '', 'volume_id': '136f5b73-3e46-4a97-a860-2727b3e8d24e', 'serial': '136f5b73-3e46-4a97-a860-2727b3e8d24e'} {{(pid=63379) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1687.299908] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-700f27e7-1bce-456c-a9cd-9ddbf777e54a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.303295] env[63379]: DEBUG oslo_concurrency.lockutils [req-2abdb104-521a-47da-8b38-db2556b56285 req-29f298cd-dc2a-45c4-b6be-1f25efe4a5d6 service nova] Releasing lock "refresh_cache-8b33e64a-ea19-4974-8c2d-350615b1e061" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1687.303715] env[63379]: DEBUG nova.compute.manager [req-2abdb104-521a-47da-8b38-db2556b56285 req-29f298cd-dc2a-45c4-b6be-1f25efe4a5d6 service nova] [instance: 266cc3d5-c10d-4367-a879-d170802495db] Received event network-vif-deleted-96c2a882-fdd3-4e25-92d0-a68dd0bcb811 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1687.304146] env[63379]: DEBUG nova.compute.manager [req-2abdb104-521a-47da-8b38-db2556b56285 req-29f298cd-dc2a-45c4-b6be-1f25efe4a5d6 service nova] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Received event network-vif-deleted-8c5374c2-6a00-48c8-846d-94d7f695d456 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1687.322786] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1b0822b-2e25-4a60-bc3b-3f96ffbab572 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.331771] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779788, 'name': CreateVM_Task, 'duration_secs': 1.353562} completed 
successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1687.348105] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1687.355976] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-9583064e-1fcd-40f7-9e14-4102db43a548 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] volume-136f5b73-3e46-4a97-a860-2727b3e8d24e/volume-136f5b73-3e46-4a97-a860-2727b3e8d24e.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1687.357080] env[63379]: DEBUG oslo_concurrency.lockutils [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1687.357487] env[63379]: DEBUG oslo_concurrency.lockutils [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1687.357943] env[63379]: DEBUG oslo_concurrency.lockutils [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1687.358322] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e0001616-bfac-4812-a182-1bff13d7c698 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.371736] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e5fd31e-5f8d-4b1a-bbdb-b605a9c5fd12 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.373994] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.491s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1687.375019] env[63379]: DEBUG nova.compute.manager [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1687.377645] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0aaea575-d5d0-43f4-8013-919e60aafde7 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.808s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1687.378418] env[63379]: DEBUG nova.objects.instance [None req-0aaea575-d5d0-43f4-8013-919e60aafde7 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Lazy-loading 'resources' on Instance uuid f087b3ac-13e2-4e55-a3ce-5e6bd3379239 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1687.385047] env[63379]: DEBUG oslo_vmware.api [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1687.385047] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]521fb5d3-8b4d-ccbb-8d98-2963dda3b1e7" [ 1687.385047] env[63379]: _type = "Task" [ 1687.385047] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1687.386510] env[63379]: DEBUG oslo_vmware.api [None req-9583064e-1fcd-40f7-9e14-4102db43a548 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1687.386510] env[63379]: value = "task-1779794" [ 1687.386510] env[63379]: _type = "Task" [ 1687.386510] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1687.399336] env[63379]: DEBUG oslo_vmware.api [None req-9583064e-1fcd-40f7-9e14-4102db43a548 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779794, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.402475] env[63379]: DEBUG oslo_vmware.api [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]521fb5d3-8b4d-ccbb-8d98-2963dda3b1e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.482359] env[63379]: DEBUG oslo_vmware.api [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779792, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.614948] env[63379]: DEBUG oslo_vmware.api [None req-df8295bb-d2ff-49ec-b548-1f0e8c489bce tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Task: {'id': task-1779793, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.392387} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1687.614948] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-df8295bb-d2ff-49ec-b548-1f0e8c489bce tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1687.614948] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-df8295bb-d2ff-49ec-b548-1f0e8c489bce tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1687.614948] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-df8295bb-d2ff-49ec-b548-1f0e8c489bce tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1687.614948] env[63379]: INFO nova.compute.manager [None req-df8295bb-d2ff-49ec-b548-1f0e8c489bce tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Took 1.24 seconds to destroy the instance on the hypervisor. [ 1687.614948] env[63379]: DEBUG oslo.service.loopingcall [None req-df8295bb-d2ff-49ec-b548-1f0e8c489bce tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1687.614948] env[63379]: DEBUG nova.compute.manager [-] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1687.614948] env[63379]: DEBUG nova.network.neutron [-] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1687.617608] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3b066b37-9cb0-4672-b2a9-366ca5e9c273 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "c1858f41-75e7-4eee-a6db-493e150622ef" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1687.617869] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3b066b37-9cb0-4672-b2a9-366ca5e9c273 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "c1858f41-75e7-4eee-a6db-493e150622ef" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1687.618087] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3b066b37-9cb0-4672-b2a9-366ca5e9c273 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "c1858f41-75e7-4eee-a6db-493e150622ef-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1687.618277] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3b066b37-9cb0-4672-b2a9-366ca5e9c273 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "c1858f41-75e7-4eee-a6db-493e150622ef-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1687.618459] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3b066b37-9cb0-4672-b2a9-366ca5e9c273 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "c1858f41-75e7-4eee-a6db-493e150622ef-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1687.621132] env[63379]: INFO nova.compute.manager [None req-3b066b37-9cb0-4672-b2a9-366ca5e9c273 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Terminating instance [ 1687.626632] env[63379]: DEBUG nova.compute.manager [None req-3b066b37-9cb0-4672-b2a9-366ca5e9c273 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1687.626885] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-3b066b37-9cb0-4672-b2a9-366ca5e9c273 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1687.628507] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdfc8e34-b807-47d4-83a8-b7008ffb234a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.638360] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b066b37-9cb0-4672-b2a9-366ca5e9c273 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1687.638804] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-04652a0f-d7c9-4b79-aab4-9936e1abe501 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.646422] env[63379]: DEBUG oslo_vmware.api [None req-3b066b37-9cb0-4672-b2a9-366ca5e9c273 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1687.646422] env[63379]: value = "task-1779795" [ 1687.646422] env[63379]: _type = "Task" [ 1687.646422] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1687.655327] env[63379]: DEBUG oslo_vmware.api [None req-3b066b37-9cb0-4672-b2a9-366ca5e9c273 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779795, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.775736] env[63379]: DEBUG nova.network.neutron [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Successfully updated port: bf9adade-286a-4e50-a0a5-a80cd17209c6 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1687.882460] env[63379]: DEBUG nova.compute.utils [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1687.887804] env[63379]: DEBUG nova.compute.manager [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1687.887980] env[63379]: DEBUG nova.network.neutron [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1687.903178] env[63379]: DEBUG oslo_vmware.api [None req-9583064e-1fcd-40f7-9e14-4102db43a548 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779794, 'name': ReconfigVM_Task, 'duration_secs': 0.49103} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1687.907970] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-9583064e-1fcd-40f7-9e14-4102db43a548 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Reconfigured VM instance instance-00000047 to attach disk [datastore1] volume-136f5b73-3e46-4a97-a860-2727b3e8d24e/volume-136f5b73-3e46-4a97-a860-2727b3e8d24e.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1687.913274] env[63379]: DEBUG oslo_vmware.api [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]521fb5d3-8b4d-ccbb-8d98-2963dda3b1e7, 'name': SearchDatastore_Task, 'duration_secs': 0.015709} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1687.916176] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9599c918-8049-4d90-abd1-8ea58f17ba2f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.925792] env[63379]: DEBUG oslo_concurrency.lockutils [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1687.926150] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1687.926424] env[63379]: DEBUG oslo_concurrency.lockutils [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1687.926603] env[63379]: DEBUG oslo_concurrency.lockutils [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1687.926794] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1687.927585] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e46ecde7-2518-4288-9c83-3bd2fc05ee7e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.937579] env[63379]: DEBUG oslo_vmware.api [None req-9583064e-1fcd-40f7-9e14-4102db43a548 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1687.937579] env[63379]: value = "task-1779796" [ 1687.937579] env[63379]: _type = "Task" [ 1687.937579] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1687.952403] env[63379]: DEBUG oslo_vmware.api [None req-9583064e-1fcd-40f7-9e14-4102db43a548 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779796, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.954708] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1687.954900] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1687.955694] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12ef3bbb-82c0-4966-8237-5b448f5777ae {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.965895] env[63379]: DEBUG nova.policy [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6e520e980ae948de98154c236d6d1167', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0383d184346141d2a770326aacff4352', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1687.973143] env[63379]: DEBUG oslo_vmware.api [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1687.973143] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5275eb69-72c1-0669-f818-c7b9d2a8d837" [ 1687.973143] env[63379]: _type = "Task" [ 1687.973143] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1688.000522] env[63379]: DEBUG oslo_vmware.api [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779792, 'name': ReconfigVM_Task, 'duration_secs': 0.651283} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1688.000789] env[63379]: DEBUG oslo_vmware.api [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5275eb69-72c1-0669-f818-c7b9d2a8d837, 'name': SearchDatastore_Task, 'duration_secs': 0.010016} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1688.004597] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Reconfigured VM instance instance-00000049 to attach disk [datastore1] da66c3d9-ca03-4113-8703-64b666628936/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48-rescue.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1688.004597] env[63379]: DEBUG nova.compute.manager [req-f0cb3f25-d64f-4bdb-9d03-983966d1ee61 req-2f0e5cf8-a96e-40a4-880a-ec46c44203f3 service nova] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Received event network-vif-plugged-bf9adade-286a-4e50-a0a5-a80cd17209c6 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1688.004597] env[63379]: DEBUG oslo_concurrency.lockutils [req-f0cb3f25-d64f-4bdb-9d03-983966d1ee61 req-2f0e5cf8-a96e-40a4-880a-ec46c44203f3 service nova] Acquiring lock "a7cce485-7476-4ea1-b127-68d879e164cd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1688.004597] env[63379]: DEBUG oslo_concurrency.lockutils [req-f0cb3f25-d64f-4bdb-9d03-983966d1ee61 req-2f0e5cf8-a96e-40a4-880a-ec46c44203f3 service nova] Lock "a7cce485-7476-4ea1-b127-68d879e164cd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1688.004597] env[63379]: DEBUG oslo_concurrency.lockutils [req-f0cb3f25-d64f-4bdb-9d03-983966d1ee61 req-2f0e5cf8-a96e-40a4-880a-ec46c44203f3 service nova] Lock "a7cce485-7476-4ea1-b127-68d879e164cd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1688.004597] env[63379]: DEBUG nova.compute.manager [req-f0cb3f25-d64f-4bdb-9d03-983966d1ee61 req-2f0e5cf8-a96e-40a4-880a-ec46c44203f3 service nova] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] No waiting events found dispatching network-vif-plugged-bf9adade-286a-4e50-a0a5-a80cd17209c6 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1688.004936] env[63379]: WARNING nova.compute.manager [req-f0cb3f25-d64f-4bdb-9d03-983966d1ee61 req-2f0e5cf8-a96e-40a4-880a-ec46c44203f3 service nova] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Received unexpected event network-vif-plugged-bf9adade-286a-4e50-a0a5-a80cd17209c6 for instance with vm_state building and task_state spawning. 
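The entries above trace Nova's external-event path: a Neutron network-vif-plugged notification arrives, the per-instance "-events" lock is taken while the manager looks for a registered waiter, none is found, and the event is logged as unexpected. Below is a minimal, self-contained sketch of that waiter/dispatch pattern for readers following the log; the class InstanceEventWaiter and its methods are illustrative names only, not Nova's actual InstanceEvents API.

```python
# Simplified sketch of the "waiting event" pattern in the entries above: the
# spawn path registers interest in an event, and the handler for the external
# notification either wakes that waiter or, if nobody is waiting yet, reports
# the event as unexpected (the WARNING seen in the log). Illustrative only.
import threading

class InstanceEventWaiter:
    def __init__(self):
        self._lock = threading.Lock()      # plays the role of the "<uuid>-events" lock
        self._waiters = {}                 # (instance_uuid, event_name) -> threading.Event

    def prepare(self, instance_uuid, event_name):
        """Called by the spawn path before it starts waiting for the event."""
        ev = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = ev
        return ev

    def dispatch(self, instance_uuid, event_name):
        """Called when an external event (e.g. network-vif-plugged) arrives."""
        with self._lock:
            ev = self._waiters.pop((instance_uuid, event_name), None)
        if ev is None:
            # No waiter registered yet, so the event is "unexpected" for now.
            print(f"WARNING: unexpected event {event_name} for {instance_uuid}")
            return False
        ev.set()
        return True

if __name__ == "__main__":
    waiter = InstanceEventWaiter()
    # Event arrives before anyone waits for it -> warning, as in the log.
    waiter.dispatch("a7cce485", "network-vif-plugged-bf9adade")
    # Normal case: register first, then the dispatch wakes the waiter.
    ev = waiter.prepare("a7cce485", "network-vif-plugged-bf9adade")
    waiter.dispatch("a7cce485", "network-vif-plugged-bf9adade")
    assert ev.is_set()
```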
[ 1688.004936] env[63379]: DEBUG nova.compute.manager [req-f0cb3f25-d64f-4bdb-9d03-983966d1ee61 req-2f0e5cf8-a96e-40a4-880a-ec46c44203f3 service nova] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Received event network-changed-bf9adade-286a-4e50-a0a5-a80cd17209c6 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1688.005245] env[63379]: DEBUG nova.compute.manager [req-f0cb3f25-d64f-4bdb-9d03-983966d1ee61 req-2f0e5cf8-a96e-40a4-880a-ec46c44203f3 service nova] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Refreshing instance network info cache due to event network-changed-bf9adade-286a-4e50-a0a5-a80cd17209c6. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1688.005333] env[63379]: DEBUG oslo_concurrency.lockutils [req-f0cb3f25-d64f-4bdb-9d03-983966d1ee61 req-2f0e5cf8-a96e-40a4-880a-ec46c44203f3 service nova] Acquiring lock "refresh_cache-a7cce485-7476-4ea1-b127-68d879e164cd" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1688.005399] env[63379]: DEBUG oslo_concurrency.lockutils [req-f0cb3f25-d64f-4bdb-9d03-983966d1ee61 req-2f0e5cf8-a96e-40a4-880a-ec46c44203f3 service nova] Acquired lock "refresh_cache-a7cce485-7476-4ea1-b127-68d879e164cd" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1688.005553] env[63379]: DEBUG nova.network.neutron [req-f0cb3f25-d64f-4bdb-9d03-983966d1ee61 req-2f0e5cf8-a96e-40a4-880a-ec46c44203f3 service nova] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Refreshing network info cache for port bf9adade-286a-4e50-a0a5-a80cd17209c6 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1688.009825] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75750d33-8905-4525-9a4c-89f60f266c42 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.013173] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5294e445-19df-43f3-8a9a-8ae0352512ea {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.020255] env[63379]: DEBUG oslo_vmware.api [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1688.020255] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52906ab0-f4fa-131d-64a8-de670c0a6895" [ 1688.020255] env[63379]: _type = "Task" [ 1688.020255] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1688.046876] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9b94ba12-607a-4c3f-8745-5f8ceafbd60c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.058697] env[63379]: DEBUG nova.compute.manager [None req-26e01ae9-6daa-4b92-93d1-a769a90d9fa3 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1688.059897] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-111786c2-7af5-43b7-9720-a5d0085fa0de {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.069757] env[63379]: DEBUG oslo_vmware.api [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52906ab0-f4fa-131d-64a8-de670c0a6895, 'name': SearchDatastore_Task, 'duration_secs': 0.014958} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1688.071399] env[63379]: DEBUG oslo_concurrency.lockutils [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1688.072122] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 8b33e64a-ea19-4974-8c2d-350615b1e061/8b33e64a-ea19-4974-8c2d-350615b1e061.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1688.072122] env[63379]: DEBUG oslo_vmware.api [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Waiting for the task: (returnval){ [ 1688.072122] env[63379]: value = "task-1779797" [ 1688.072122] env[63379]: _type = "Task" [ 1688.072122] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1688.072388] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-77e1e9ba-8b1b-4a30-9cb0-2ee16e42da5f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.089871] env[63379]: DEBUG oslo_vmware.api [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779797, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.091477] env[63379]: DEBUG oslo_vmware.api [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1688.091477] env[63379]: value = "task-1779798" [ 1688.091477] env[63379]: _type = "Task" [ 1688.091477] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1688.101960] env[63379]: DEBUG oslo_vmware.api [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779798, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.150652] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "fad7a2dd-291f-4105-95a6-56bdbcc7acb4" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1688.150652] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "fad7a2dd-291f-4105-95a6-56bdbcc7acb4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1688.160134] env[63379]: DEBUG oslo_vmware.api [None req-3b066b37-9cb0-4672-b2a9-366ca5e9c273 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779795, 'name': PowerOffVM_Task, 'duration_secs': 0.250799} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1688.160226] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b066b37-9cb0-4672-b2a9-366ca5e9c273 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1688.160359] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-3b066b37-9cb0-4672-b2a9-366ca5e9c273 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1688.160617] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-84516782-923f-437e-a619-2a0250a9a9be {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.207316] env[63379]: DEBUG nova.compute.manager [req-5a2ab4bf-cd94-4529-8da7-f9072b7aa79c req-cc96e336-f5cc-4b40-b3d3-400c0bdd90d2 service nova] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Received event network-vif-deleted-89d7e5cf-c802-47c1-97bd-981796ed50c7 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1688.207557] env[63379]: INFO nova.compute.manager [req-5a2ab4bf-cd94-4529-8da7-f9072b7aa79c req-cc96e336-f5cc-4b40-b3d3-400c0bdd90d2 service nova] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Neutron deleted interface 89d7e5cf-c802-47c1-97bd-981796ed50c7; detaching it from the instance and deleting it from the info cache [ 1688.207687] env[63379]: DEBUG nova.network.neutron [req-5a2ab4bf-cd94-4529-8da7-f9072b7aa79c req-cc96e336-f5cc-4b40-b3d3-400c0bdd90d2 service nova] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1688.278412] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "refresh_cache-a7cce485-7476-4ea1-b127-68d879e164cd" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1688.324100] env[63379]: DEBUG nova.network.neutron [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Successfully created port: d304bb93-6f61-492c-9e8c-ce1b0ac9131e {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1688.334203] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-facc653f-4c09-4b4f-8f53-f534465c7717 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.345022] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac91169b-c634-4448-b76a-b08990aeb522 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.346206] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None 
req-3b066b37-9cb0-4672-b2a9-366ca5e9c273 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1688.346427] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-3b066b37-9cb0-4672-b2a9-366ca5e9c273 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1688.346601] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b066b37-9cb0-4672-b2a9-366ca5e9c273 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Deleting the datastore file [datastore1] c1858f41-75e7-4eee-a6db-493e150622ef {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1688.347176] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a94b5456-86a1-4ea2-87c8-0a9033058ac5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.379190] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-435e7f32-6872-40a9-afca-0f2c784c5903 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.381829] env[63379]: DEBUG oslo_vmware.api [None req-3b066b37-9cb0-4672-b2a9-366ca5e9c273 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1688.381829] env[63379]: value = "task-1779800" [ 1688.381829] env[63379]: _type = "Task" [ 1688.381829] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1688.388241] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6223b67-7c17-4d92-91a1-1474ea73a1a7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.394761] env[63379]: DEBUG nova.compute.manager [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1688.397231] env[63379]: DEBUG oslo_vmware.api [None req-3b066b37-9cb0-4672-b2a9-366ca5e9c273 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779800, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.406724] env[63379]: DEBUG nova.compute.provider_tree [None req-0aaea575-d5d0-43f4-8013-919e60aafde7 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1688.448075] env[63379]: DEBUG oslo_vmware.api [None req-9583064e-1fcd-40f7-9e14-4102db43a548 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779796, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.563740] env[63379]: DEBUG nova.network.neutron [req-f0cb3f25-d64f-4bdb-9d03-983966d1ee61 req-2f0e5cf8-a96e-40a4-880a-ec46c44203f3 service nova] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1688.584461] env[63379]: DEBUG oslo_vmware.api [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779797, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.584942] env[63379]: INFO nova.compute.manager [None req-26e01ae9-6daa-4b92-93d1-a769a90d9fa3 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] instance snapshotting [ 1688.587581] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5046fddf-5073-47cc-9369-fef3d30ac0c4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.607760] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db46de78-2809-44bd-bc04-b97ba8fdd5c0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.616719] env[63379]: DEBUG oslo_vmware.api [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779798, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.646497] env[63379]: DEBUG nova.network.neutron [req-f0cb3f25-d64f-4bdb-9d03-983966d1ee61 req-2f0e5cf8-a96e-40a4-880a-ec46c44203f3 service nova] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1688.654278] env[63379]: DEBUG nova.compute.manager [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Starting instance... 
{{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1688.664882] env[63379]: DEBUG nova.network.neutron [-] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1688.712991] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3f450610-0184-4df5-9f14-d8ad852a4aca {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.723390] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6261028f-66ee-4082-aade-06de7cedb2b6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.762516] env[63379]: DEBUG nova.compute.manager [req-5a2ab4bf-cd94-4529-8da7-f9072b7aa79c req-cc96e336-f5cc-4b40-b3d3-400c0bdd90d2 service nova] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Detach interface failed, port_id=89d7e5cf-c802-47c1-97bd-981796ed50c7, reason: Instance 41952d7b-ce23-4e9b-8843-bbac1d3099c1 could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 1688.891905] env[63379]: DEBUG oslo_vmware.api [None req-3b066b37-9cb0-4672-b2a9-366ca5e9c273 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779800, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.909873] env[63379]: DEBUG nova.scheduler.client.report [None req-0aaea575-d5d0-43f4-8013-919e60aafde7 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1688.949986] env[63379]: DEBUG oslo_vmware.api [None req-9583064e-1fcd-40f7-9e14-4102db43a548 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779796, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.085066] env[63379]: DEBUG oslo_vmware.api [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779797, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.113980] env[63379]: DEBUG oslo_vmware.api [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779798, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.122253] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-26e01ae9-6daa-4b92-93d1-a769a90d9fa3 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Creating Snapshot of the VM instance {{(pid=63379) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1689.122529] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-2ccf292b-2f84-4c8a-96ec-05e62cbf0fba {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.129230] env[63379]: DEBUG oslo_vmware.api [None req-26e01ae9-6daa-4b92-93d1-a769a90d9fa3 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Waiting for the task: (returnval){ [ 1689.129230] env[63379]: value = "task-1779801" [ 1689.129230] env[63379]: _type = "Task" [ 1689.129230] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1689.136747] env[63379]: DEBUG oslo_vmware.api [None req-26e01ae9-6daa-4b92-93d1-a769a90d9fa3 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779801, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.149368] env[63379]: DEBUG oslo_concurrency.lockutils [req-f0cb3f25-d64f-4bdb-9d03-983966d1ee61 req-2f0e5cf8-a96e-40a4-880a-ec46c44203f3 service nova] Releasing lock "refresh_cache-a7cce485-7476-4ea1-b127-68d879e164cd" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1689.149731] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquired lock "refresh_cache-a7cce485-7476-4ea1-b127-68d879e164cd" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1689.149886] env[63379]: DEBUG nova.network.neutron [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1689.167386] env[63379]: INFO nova.compute.manager [-] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Took 1.55 seconds to deallocate network for instance. 
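Most of the oslo_vmware.api entries in this section follow the same shape: a vCenter task is created, polled on an interval while its progress percentage is logged, and finally reported as completed successfully with a duration_secs value. The sketch below shows that generic poll-until-done loop with a fake task object; it is only an illustration of the pattern traced by the wait_for_task/_poll_task entries, not oslo.vmware's implementation, and FakeTask is a hypothetical stand-in.

```python
# Generic poll-until-done loop in the spirit of the task entries above: poll
# on a fixed interval, log progress, and report the wall-clock duration once
# the task reaches a terminal state. Stand-alone illustration, not oslo.vmware.
import time

class FakeTask:
    """Hypothetical stand-in for a vCenter task handle."""
    def __init__(self, steps):
        self._progress = iter(steps)

    def poll(self):
        try:
            return {"state": "running", "progress": next(self._progress)}
        except StopIteration:
            return {"state": "success", "progress": 100}

def wait_for_task(task, interval=0.1):
    start = time.monotonic()
    while True:
        info = task.poll()
        if info["state"] == "success":
            duration = time.monotonic() - start
            print(f"completed successfully, duration_secs={duration:.3f}")
            return
        print(f"progress is {info['progress']}%")
        time.sleep(interval)

if __name__ == "__main__":
    wait_for_task(FakeTask([0, 14, 66, 99]))
```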
[ 1689.181235] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1689.394472] env[63379]: DEBUG oslo_vmware.api [None req-3b066b37-9cb0-4672-b2a9-366ca5e9c273 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779800, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.737976} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1689.394797] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b066b37-9cb0-4672-b2a9-366ca5e9c273 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1689.395021] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-3b066b37-9cb0-4672-b2a9-366ca5e9c273 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1689.395202] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-3b066b37-9cb0-4672-b2a9-366ca5e9c273 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1689.395387] env[63379]: INFO nova.compute.manager [None req-3b066b37-9cb0-4672-b2a9-366ca5e9c273 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Took 1.77 seconds to destroy the instance on the hypervisor. [ 1689.395657] env[63379]: DEBUG oslo.service.loopingcall [None req-3b066b37-9cb0-4672-b2a9-366ca5e9c273 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1689.395865] env[63379]: DEBUG nova.compute.manager [-] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1689.395961] env[63379]: DEBUG nova.network.neutron [-] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1689.403042] env[63379]: DEBUG nova.compute.manager [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1689.415132] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0aaea575-d5d0-43f4-8013-919e60aafde7 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.037s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1689.417628] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3308a90b-85a3-4fdd-b00e-018db6d7ac5f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.674s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1689.417871] env[63379]: DEBUG nova.objects.instance [None req-3308a90b-85a3-4fdd-b00e-018db6d7ac5f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Lazy-loading 'resources' on Instance uuid aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1689.432923] env[63379]: DEBUG nova.virt.hardware [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=<?>,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-12-11T23:13:50Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1689.433201] env[63379]: DEBUG nova.virt.hardware [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1689.433366] env[63379]: DEBUG nova.virt.hardware [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1689.433552] env[63379]: DEBUG nova.virt.hardware [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1689.433706] env[63379]: DEBUG nova.virt.hardware [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 
tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1689.433884] env[63379]: DEBUG nova.virt.hardware [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1689.434174] env[63379]: DEBUG nova.virt.hardware [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1689.434372] env[63379]: DEBUG nova.virt.hardware [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1689.434566] env[63379]: DEBUG nova.virt.hardware [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1689.434744] env[63379]: DEBUG nova.virt.hardware [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1689.434929] env[63379]: DEBUG nova.virt.hardware [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1689.436558] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c392abe1-2d48-42a2-8bdf-15b516800c76 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.440429] env[63379]: INFO nova.scheduler.client.report [None req-0aaea575-d5d0-43f4-8013-919e60aafde7 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Deleted allocations for instance f087b3ac-13e2-4e55-a3ce-5e6bd3379239 [ 1689.455162] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdb1dfa0-8e54-439e-b387-2abbb594a208 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.459692] env[63379]: DEBUG oslo_vmware.api [None req-9583064e-1fcd-40f7-9e14-4102db43a548 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779796, 'name': ReconfigVM_Task, 'duration_secs': 1.184631} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1689.460057] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-9583064e-1fcd-40f7-9e14-4102db43a548 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369424', 'volume_id': '136f5b73-3e46-4a97-a860-2727b3e8d24e', 'name': 'volume-136f5b73-3e46-4a97-a860-2727b3e8d24e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f', 'attached_at': '', 'detached_at': '', 'volume_id': '136f5b73-3e46-4a97-a860-2727b3e8d24e', 'serial': '136f5b73-3e46-4a97-a860-2727b3e8d24e'} {{(pid=63379) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1689.584442] env[63379]: DEBUG oslo_vmware.api [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779797, 'name': ReconfigVM_Task, 'duration_secs': 1.182805} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1689.584852] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1689.585648] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ec630cda-acac-426e-aafb-2a8205dd82f4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.594097] env[63379]: DEBUG oslo_vmware.api [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Waiting for the task: (returnval){ [ 1689.594097] env[63379]: value = "task-1779802" [ 1689.594097] env[63379]: _type = "Task" [ 1689.594097] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1689.603024] env[63379]: DEBUG oslo_vmware.api [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779802, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.615968] env[63379]: DEBUG oslo_vmware.api [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779798, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.508787} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1689.616361] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 8b33e64a-ea19-4974-8c2d-350615b1e061/8b33e64a-ea19-4974-8c2d-350615b1e061.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1689.617683] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1689.617683] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ec6596a4-6944-484a-bd13-fbfa1643a8e2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.624845] env[63379]: DEBUG oslo_vmware.api [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1689.624845] env[63379]: value = "task-1779803" [ 1689.624845] env[63379]: _type = "Task" [ 1689.624845] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1689.636404] env[63379]: DEBUG oslo_vmware.api [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779803, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.642219] env[63379]: DEBUG oslo_vmware.api [None req-26e01ae9-6daa-4b92-93d1-a769a90d9fa3 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779801, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.673330] env[63379]: DEBUG oslo_concurrency.lockutils [None req-df8295bb-d2ff-49ec-b548-1f0e8c489bce tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1689.700878] env[63379]: DEBUG nova.network.neutron [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Instance cache missing network info. 
{{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1689.856366] env[63379]: DEBUG nova.network.neutron [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Successfully updated port: d304bb93-6f61-492c-9e8c-ce1b0ac9131e {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1689.872937] env[63379]: DEBUG nova.network.neutron [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Updating instance_info_cache with network_info: [{"id": "bf9adade-286a-4e50-a0a5-a80cd17209c6", "address": "fa:16:3e:f3:83:85", "network": {"id": "c67e6fb1-ba3e-4494-b459-ecd555f3bf64", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1864563188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c01c5c8c3734c4ea066324e542e7374", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6934071-bf85-4591-9c7d-55c7ea131262", "external-id": "nsx-vlan-transportzone-452", "segmentation_id": 452, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf9adade-28", "ovs_interfaceid": "bf9adade-286a-4e50-a0a5-a80cd17209c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1689.951842] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0aaea575-d5d0-43f4-8013-919e60aafde7 tempest-ServerRescueTestJSONUnderV235-99662031 tempest-ServerRescueTestJSONUnderV235-99662031-project-member] Lock "f087b3ac-13e2-4e55-a3ce-5e6bd3379239" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.394s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1690.038925] env[63379]: DEBUG nova.compute.manager [req-c1a25674-cf68-4742-af11-42ca7beae76a req-b42ab233-6172-4968-a101-9d10f7f706b7 service nova] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Received event network-vif-plugged-d304bb93-6f61-492c-9e8c-ce1b0ac9131e {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1690.039720] env[63379]: DEBUG oslo_concurrency.lockutils [req-c1a25674-cf68-4742-af11-42ca7beae76a req-b42ab233-6172-4968-a101-9d10f7f706b7 service nova] Acquiring lock "861cda26-f938-4b2e-ba3d-56b8469b6034-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1690.039720] env[63379]: DEBUG oslo_concurrency.lockutils [req-c1a25674-cf68-4742-af11-42ca7beae76a req-b42ab233-6172-4968-a101-9d10f7f706b7 service nova] Lock "861cda26-f938-4b2e-ba3d-56b8469b6034-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1690.039720] env[63379]: DEBUG oslo_concurrency.lockutils [req-c1a25674-cf68-4742-af11-42ca7beae76a req-b42ab233-6172-4968-a101-9d10f7f706b7 service nova] Lock "861cda26-f938-4b2e-ba3d-56b8469b6034-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1690.039720] env[63379]: DEBUG nova.compute.manager [req-c1a25674-cf68-4742-af11-42ca7beae76a req-b42ab233-6172-4968-a101-9d10f7f706b7 service nova] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] No waiting events found dispatching network-vif-plugged-d304bb93-6f61-492c-9e8c-ce1b0ac9131e {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1690.040121] env[63379]: WARNING nova.compute.manager [req-c1a25674-cf68-4742-af11-42ca7beae76a req-b42ab233-6172-4968-a101-9d10f7f706b7 service nova] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Received unexpected event network-vif-plugged-d304bb93-6f61-492c-9e8c-ce1b0ac9131e for instance with vm_state building and task_state spawning. [ 1690.040248] env[63379]: DEBUG nova.compute.manager [req-c1a25674-cf68-4742-af11-42ca7beae76a req-b42ab233-6172-4968-a101-9d10f7f706b7 service nova] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Received event network-changed-d304bb93-6f61-492c-9e8c-ce1b0ac9131e {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1690.040398] env[63379]: DEBUG nova.compute.manager [req-c1a25674-cf68-4742-af11-42ca7beae76a req-b42ab233-6172-4968-a101-9d10f7f706b7 service nova] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Refreshing instance network info cache due to event network-changed-d304bb93-6f61-492c-9e8c-ce1b0ac9131e. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1690.040575] env[63379]: DEBUG oslo_concurrency.lockutils [req-c1a25674-cf68-4742-af11-42ca7beae76a req-b42ab233-6172-4968-a101-9d10f7f706b7 service nova] Acquiring lock "refresh_cache-861cda26-f938-4b2e-ba3d-56b8469b6034" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1690.040713] env[63379]: DEBUG oslo_concurrency.lockutils [req-c1a25674-cf68-4742-af11-42ca7beae76a req-b42ab233-6172-4968-a101-9d10f7f706b7 service nova] Acquired lock "refresh_cache-861cda26-f938-4b2e-ba3d-56b8469b6034" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1690.040871] env[63379]: DEBUG nova.network.neutron [req-c1a25674-cf68-4742-af11-42ca7beae76a req-b42ab233-6172-4968-a101-9d10f7f706b7 service nova] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Refreshing network info cache for port d304bb93-6f61-492c-9e8c-ce1b0ac9131e {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1690.105340] env[63379]: DEBUG oslo_vmware.api [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779802, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.137227] env[63379]: DEBUG oslo_vmware.api [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779803, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064763} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1690.137675] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1690.138643] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce28dc6f-10b6-456e-b777-d7e1f3d68994 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.143911] env[63379]: DEBUG oslo_vmware.api [None req-26e01ae9-6daa-4b92-93d1-a769a90d9fa3 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779801, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.166465] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Reconfiguring VM instance instance-0000004b to attach disk [datastore1] 8b33e64a-ea19-4974-8c2d-350615b1e061/8b33e64a-ea19-4974-8c2d-350615b1e061.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1690.169256] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c0387c81-6891-4692-880d-bd2dff0f0da0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.184239] env[63379]: DEBUG nova.network.neutron [-] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1690.191376] env[63379]: DEBUG oslo_vmware.api [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1690.191376] env[63379]: value = "task-1779804" [ 1690.191376] env[63379]: _type = "Task" [ 1690.191376] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1690.208075] env[63379]: DEBUG oslo_vmware.api [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779804, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.232016] env[63379]: DEBUG nova.compute.manager [req-286be817-4419-47b4-a207-c63d27da5d5f req-3513e3d7-b68c-42ef-be90-f6c92959a08a service nova] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Received event network-vif-deleted-1a119dbf-427b-4b34-819c-d65a9f0f88a8 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1690.285233] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8162ca01-459c-48bb-a40a-eb74ed6510f5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.293795] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aec2d0be-08b9-416a-b738-104d2f4c619c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.324421] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae1557a0-538f-43b8-9491-a80e16b8c087 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.334113] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70040bcf-faaa-41c9-9838-ee2e2e375a80 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.346906] env[63379]: DEBUG nova.compute.provider_tree [None req-3308a90b-85a3-4fdd-b00e-018db6d7ac5f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1690.360737] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Acquiring lock "refresh_cache-861cda26-f938-4b2e-ba3d-56b8469b6034" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1690.375500] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Releasing lock "refresh_cache-a7cce485-7476-4ea1-b127-68d879e164cd" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1690.375870] env[63379]: DEBUG nova.compute.manager [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Instance network_info: |[{"id": "bf9adade-286a-4e50-a0a5-a80cd17209c6", "address": "fa:16:3e:f3:83:85", "network": {"id": "c67e6fb1-ba3e-4494-b459-ecd555f3bf64", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1864563188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, 
"tenant_id": "8c01c5c8c3734c4ea066324e542e7374", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6934071-bf85-4591-9c7d-55c7ea131262", "external-id": "nsx-vlan-transportzone-452", "segmentation_id": 452, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf9adade-28", "ovs_interfaceid": "bf9adade-286a-4e50-a0a5-a80cd17209c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1690.376710] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f3:83:85', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c6934071-bf85-4591-9c7d-55c7ea131262', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bf9adade-286a-4e50-a0a5-a80cd17209c6', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1690.384405] env[63379]: DEBUG oslo.service.loopingcall [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1690.384656] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1690.384877] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-79e6cb2e-f87b-488d-a153-8c9f6ab56a10 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.407525] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1690.407525] env[63379]: value = "task-1779805" [ 1690.407525] env[63379]: _type = "Task" [ 1690.407525] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1690.415716] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779805, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.502926] env[63379]: DEBUG nova.objects.instance [None req-9583064e-1fcd-40f7-9e14-4102db43a548 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lazy-loading 'flavor' on Instance uuid fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1690.590757] env[63379]: DEBUG nova.network.neutron [req-c1a25674-cf68-4742-af11-42ca7beae76a req-b42ab233-6172-4968-a101-9d10f7f706b7 service nova] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Instance cache missing network info. 
{{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1690.602963] env[63379]: DEBUG oslo_vmware.api [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779802, 'name': PowerOnVM_Task, 'duration_secs': 0.827912} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1690.603252] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1690.605831] env[63379]: DEBUG nova.compute.manager [None req-c5193d7a-4526-453e-afd6-0952831749e7 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1690.606642] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c7b1b79-571f-4131-b212-91127b5ea7bf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.639868] env[63379]: DEBUG oslo_vmware.api [None req-26e01ae9-6daa-4b92-93d1-a769a90d9fa3 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779801, 'name': CreateSnapshot_Task, 'duration_secs': 1.409484} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1690.640160] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-26e01ae9-6daa-4b92-93d1-a769a90d9fa3 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Created Snapshot of the VM instance {{(pid=63379) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1690.641510] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed1017f5-ff67-4674-8778-9541fa82603b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.686180] env[63379]: INFO nova.compute.manager [-] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Took 1.29 seconds to deallocate network for instance. [ 1690.701207] env[63379]: DEBUG oslo_vmware.api [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779804, 'name': ReconfigVM_Task, 'duration_secs': 0.299047} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1690.701523] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Reconfigured VM instance instance-0000004b to attach disk [datastore1] 8b33e64a-ea19-4974-8c2d-350615b1e061/8b33e64a-ea19-4974-8c2d-350615b1e061.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1690.702281] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1690.702512] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3772e970-2bf4-4e9a-9236-ec972037ae74 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.709632] env[63379]: DEBUG oslo_vmware.api [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1690.709632] env[63379]: value = "task-1779806" [ 1690.709632] env[63379]: _type = "Task" [ 1690.709632] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1690.718259] env[63379]: DEBUG oslo_vmware.api [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779806, 'name': Rename_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.792342] env[63379]: DEBUG nova.network.neutron [req-c1a25674-cf68-4742-af11-42ca7beae76a req-b42ab233-6172-4968-a101-9d10f7f706b7 service nova] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1690.850095] env[63379]: DEBUG nova.scheduler.client.report [None req-3308a90b-85a3-4fdd-b00e-018db6d7ac5f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1690.918410] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779805, 'name': CreateVM_Task, 'duration_secs': 0.509637} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1690.918590] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1690.919307] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1690.919473] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1690.919796] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1690.920066] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca0dff0f-2ad4-4e9d-a6de-9a29a9809f6a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.924274] env[63379]: DEBUG oslo_vmware.api [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1690.924274] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5205a98b-863e-87cf-cd71-e9da9b84565e" [ 1690.924274] env[63379]: _type = "Task" [ 1690.924274] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1690.933207] env[63379]: DEBUG oslo_vmware.api [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5205a98b-863e-87cf-cd71-e9da9b84565e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.007617] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9583064e-1fcd-40f7-9e14-4102db43a548 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 10.867s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1691.009635] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.307s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1691.009865] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1691.010098] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1691.010265] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1691.015014] env[63379]: INFO nova.compute.manager [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Terminating instance [ 1691.022036] env[63379]: DEBUG nova.compute.manager [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1691.022036] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1691.022036] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-95b8c2ed-a391-41c9-8310-04a77f3b7384 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.030313] env[63379]: DEBUG oslo_vmware.api [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1691.030313] env[63379]: value = "task-1779807" [ 1691.030313] env[63379]: _type = "Task" [ 1691.030313] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1691.038127] env[63379]: DEBUG oslo_vmware.api [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779807, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.160517] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-26e01ae9-6daa-4b92-93d1-a769a90d9fa3 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Creating linked-clone VM from snapshot {{(pid=63379) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1691.160855] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-ed3fa76e-1269-4161-b728-71bc10d805c8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.169858] env[63379]: DEBUG oslo_vmware.api [None req-26e01ae9-6daa-4b92-93d1-a769a90d9fa3 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Waiting for the task: (returnval){ [ 1691.169858] env[63379]: value = "task-1779808" [ 1691.169858] env[63379]: _type = "Task" [ 1691.169858] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1691.180478] env[63379]: DEBUG oslo_vmware.api [None req-26e01ae9-6daa-4b92-93d1-a769a90d9fa3 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779808, 'name': CloneVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.192626] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3b066b37-9cb0-4672-b2a9-366ca5e9c273 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1691.220148] env[63379]: DEBUG oslo_vmware.api [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779806, 'name': Rename_Task, 'duration_secs': 0.207671} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1691.220454] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1691.220698] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d7f85524-01c8-47ee-a7a6-23ad85852efa {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.229513] env[63379]: DEBUG oslo_vmware.api [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1691.229513] env[63379]: value = "task-1779809" [ 1691.229513] env[63379]: _type = "Task" [ 1691.229513] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1691.239010] env[63379]: DEBUG oslo_vmware.api [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779809, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.295560] env[63379]: DEBUG oslo_concurrency.lockutils [req-c1a25674-cf68-4742-af11-42ca7beae76a req-b42ab233-6172-4968-a101-9d10f7f706b7 service nova] Releasing lock "refresh_cache-861cda26-f938-4b2e-ba3d-56b8469b6034" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1691.296195] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Acquired lock "refresh_cache-861cda26-f938-4b2e-ba3d-56b8469b6034" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1691.296474] env[63379]: DEBUG nova.network.neutron [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1691.355693] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3308a90b-85a3-4fdd-b00e-018db6d7ac5f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.938s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1691.359029] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6a4ae491-cf15-42f2-b882-7377288a8f45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.453s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1691.359457] env[63379]: DEBUG nova.objects.instance [None req-6a4ae491-cf15-42f2-b882-7377288a8f45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Lazy-loading 'resources' on Instance uuid ec1f7a44-7344-43fb-9d51-688731d8ce14 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1691.386988] env[63379]: INFO nova.scheduler.client.report [None req-3308a90b-85a3-4fdd-b00e-018db6d7ac5f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Deleted allocations for instance aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae [ 1691.439176] env[63379]: DEBUG oslo_vmware.api [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5205a98b-863e-87cf-cd71-e9da9b84565e, 'name': SearchDatastore_Task, 'duration_secs': 0.022749} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1691.440037] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1691.440037] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1691.440199] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1691.440396] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1691.441786] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1691.441786] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dcc87ac1-8e6a-46d1-b63b-4ea1c77c9f1b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.450826] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1691.450826] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1691.451278] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7bbde194-e0cf-483f-bbe5-2e81171a8269 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.460019] env[63379]: DEBUG oslo_vmware.api [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1691.460019] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]528634ac-d2d8-f5f5-fb95-f93943899f81" [ 1691.460019] env[63379]: _type = "Task" [ 1691.460019] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1691.465585] env[63379]: DEBUG oslo_vmware.api [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]528634ac-d2d8-f5f5-fb95-f93943899f81, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.541602] env[63379]: DEBUG oslo_vmware.api [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779807, 'name': PowerOffVM_Task, 'duration_secs': 0.490834} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1691.541893] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1691.542112] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Volume detach. 
Driver type: vmdk {{(pid=63379) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1691.542314] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369424', 'volume_id': '136f5b73-3e46-4a97-a860-2727b3e8d24e', 'name': 'volume-136f5b73-3e46-4a97-a860-2727b3e8d24e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f', 'attached_at': '', 'detached_at': '', 'volume_id': '136f5b73-3e46-4a97-a860-2727b3e8d24e', 'serial': '136f5b73-3e46-4a97-a860-2727b3e8d24e'} {{(pid=63379) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1691.543176] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27ad909e-e1cb-4b13-b40a-5fcdee3d7143 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.564872] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b7ad92a-9415-47ce-89b3-7d4b38ff5035 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.572373] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eba99d8d-9f17-4bca-91a7-4487b49ad87a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.593934] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4c060cc-2d9b-43cf-89e4-f637138e0d2e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.611463] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] The volume has not been displaced from its original location: [datastore1] volume-136f5b73-3e46-4a97-a860-2727b3e8d24e/volume-136f5b73-3e46-4a97-a860-2727b3e8d24e.vmdk. No consolidation needed. 
{{(pid=63379) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1691.616516] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Reconfiguring VM instance instance-00000047 to detach disk 2001 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1691.616865] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3c610f83-2976-46a4-bcf8-64779149e838 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.635824] env[63379]: DEBUG oslo_vmware.api [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1691.635824] env[63379]: value = "task-1779810" [ 1691.635824] env[63379]: _type = "Task" [ 1691.635824] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1691.645376] env[63379]: DEBUG oslo_vmware.api [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779810, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.679856] env[63379]: DEBUG oslo_vmware.api [None req-26e01ae9-6daa-4b92-93d1-a769a90d9fa3 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779808, 'name': CloneVM_Task} progress is 94%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.740473] env[63379]: DEBUG oslo_vmware.api [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779809, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.833833] env[63379]: DEBUG nova.network.neutron [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Instance cache missing network info. 
{{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1691.869765] env[63379]: INFO nova.compute.manager [None req-f26ec8e6-3cdb-46d0-8785-f6a5bf9e0650 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Unrescuing [ 1691.870093] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f26ec8e6-3cdb-46d0-8785-f6a5bf9e0650 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Acquiring lock "refresh_cache-da66c3d9-ca03-4113-8703-64b666628936" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1691.870286] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f26ec8e6-3cdb-46d0-8785-f6a5bf9e0650 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Acquired lock "refresh_cache-da66c3d9-ca03-4113-8703-64b666628936" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1691.870456] env[63379]: DEBUG nova.network.neutron [None req-f26ec8e6-3cdb-46d0-8785-f6a5bf9e0650 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1691.898480] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3308a90b-85a3-4fdd-b00e-018db6d7ac5f tempest-MigrationsAdminTest-2108201557 tempest-MigrationsAdminTest-2108201557-project-member] Lock "aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.448s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1691.970756] env[63379]: DEBUG oslo_vmware.api [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]528634ac-d2d8-f5f5-fb95-f93943899f81, 'name': SearchDatastore_Task, 'duration_secs': 0.01161} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1691.971556] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc38ad9c-cae5-40a1-aa43-b201a7710d6b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.979979] env[63379]: DEBUG oslo_vmware.api [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1691.979979] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52309f7a-aabd-a551-8bdd-4980f680142e" [ 1691.979979] env[63379]: _type = "Task" [ 1691.979979] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1691.988861] env[63379]: DEBUG oslo_vmware.api [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52309f7a-aabd-a551-8bdd-4980f680142e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.054669] env[63379]: DEBUG nova.network.neutron [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Updating instance_info_cache with network_info: [{"id": "d304bb93-6f61-492c-9e8c-ce1b0ac9131e", "address": "fa:16:3e:5c:45:78", "network": {"id": "15935301-b9ea-4551-a700-b73cc947f238", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-2097808798-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0383d184346141d2a770326aacff4352", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e7a4976-597e-4636-990e-6062b5faadee", "external-id": "nsx-vlan-transportzone-847", "segmentation_id": 847, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd304bb93-6f", "ovs_interfaceid": "d304bb93-6f61-492c-9e8c-ce1b0ac9131e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1692.149180] env[63379]: DEBUG oslo_vmware.api [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779810, 'name': ReconfigVM_Task, 'duration_secs': 0.236382} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1692.149508] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Reconfigured VM instance instance-00000047 to detach disk 2001 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1692.156896] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0bb664f2-6fec-41fa-a80c-8eb78e7d495b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.176632] env[63379]: DEBUG oslo_vmware.api [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1692.176632] env[63379]: value = "task-1779811" [ 1692.176632] env[63379]: _type = "Task" [ 1692.176632] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1692.186487] env[63379]: DEBUG oslo_vmware.api [None req-26e01ae9-6daa-4b92-93d1-a769a90d9fa3 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779808, 'name': CloneVM_Task} progress is 95%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.192546] env[63379]: DEBUG oslo_vmware.api [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779811, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.223183] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee970d6d-0858-4831-98ff-328be1bc299b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.236020] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13531cbf-1b0d-4a11-82c2-4ba288e539ce {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.243025] env[63379]: DEBUG oslo_vmware.api [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779809, 'name': PowerOnVM_Task, 'duration_secs': 0.51538} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1692.269229] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1692.269573] env[63379]: INFO nova.compute.manager [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Took 8.96 seconds to spawn the instance on the hypervisor. 
[ 1692.269805] env[63379]: DEBUG nova.compute.manager [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1692.270995] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5145446d-4e04-47de-becd-28550aecfecc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.274258] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f023df1f-c457-489e-bd7e-a418f3e41a2b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.286334] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3beeb7fa-cc83-42c9-b228-07f14af34a66 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.300631] env[63379]: DEBUG nova.compute.provider_tree [None req-6a4ae491-cf15-42f2-b882-7377288a8f45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1692.490468] env[63379]: DEBUG oslo_vmware.api [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52309f7a-aabd-a551-8bdd-4980f680142e, 'name': SearchDatastore_Task, 'duration_secs': 0.010632} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1692.490764] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1692.490987] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] a7cce485-7476-4ea1-b127-68d879e164cd/a7cce485-7476-4ea1-b127-68d879e164cd.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1692.491325] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-94e6d5c4-1eb0-43ae-af92-f498b2670e29 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.501405] env[63379]: DEBUG oslo_vmware.api [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1692.501405] env[63379]: value = "task-1779812" [ 1692.501405] env[63379]: _type = "Task" [ 1692.501405] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1692.510443] env[63379]: DEBUG oslo_vmware.api [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1779812, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.558690] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Releasing lock "refresh_cache-861cda26-f938-4b2e-ba3d-56b8469b6034" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1692.558932] env[63379]: DEBUG nova.compute.manager [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Instance network_info: |[{"id": "d304bb93-6f61-492c-9e8c-ce1b0ac9131e", "address": "fa:16:3e:5c:45:78", "network": {"id": "15935301-b9ea-4551-a700-b73cc947f238", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-2097808798-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0383d184346141d2a770326aacff4352", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e7a4976-597e-4636-990e-6062b5faadee", "external-id": "nsx-vlan-transportzone-847", "segmentation_id": 847, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd304bb93-6f", "ovs_interfaceid": "d304bb93-6f61-492c-9e8c-ce1b0ac9131e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1692.559371] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5c:45:78', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1e7a4976-597e-4636-990e-6062b5faadee', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd304bb93-6f61-492c-9e8c-ce1b0ac9131e', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1692.568553] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Creating folder: Project (0383d184346141d2a770326aacff4352). Parent ref: group-v369214. 
{{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1692.568553] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6cc55e80-1ca2-4efc-b438-528da1e9d842 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.577329] env[63379]: DEBUG nova.network.neutron [None req-f26ec8e6-3cdb-46d0-8785-f6a5bf9e0650 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Updating instance_info_cache with network_info: [{"id": "7a48c3ef-9850-43b6-b138-d7cbb329face", "address": "fa:16:3e:d8:c3:c8", "network": {"id": "832e4609-8371-4d4b-8cfc-8a38039d24b7", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1517956996-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "c3562bb229474ba7aa3dae98def05260", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f267bcdd-0daa-4337-9709-5fc060c267d8", "external-id": "nsx-vlan-transportzone-308", "segmentation_id": 308, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a48c3ef-98", "ovs_interfaceid": "7a48c3ef-9850-43b6-b138-d7cbb329face", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1692.581133] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Created folder: Project (0383d184346141d2a770326aacff4352) in parent group-v369214. [ 1692.581335] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Creating folder: Instances. Parent ref: group-v369429. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1692.581586] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dd7c7e28-6d7a-4177-be0d-1fad6afa0484 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.591832] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Created folder: Instances in parent group-v369429. [ 1692.591925] env[63379]: DEBUG oslo.service.loopingcall [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1692.592490] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1692.592914] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-da715905-6bdb-4c28-b2f5-13751255e378 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.612031] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1692.612031] env[63379]: value = "task-1779815" [ 1692.612031] env[63379]: _type = "Task" [ 1692.612031] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1692.619923] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779815, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.681707] env[63379]: DEBUG oslo_vmware.api [None req-26e01ae9-6daa-4b92-93d1-a769a90d9fa3 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779808, 'name': CloneVM_Task, 'duration_secs': 1.217665} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1692.685283] env[63379]: INFO nova.virt.vmwareapi.vmops [None req-26e01ae9-6daa-4b92-93d1-a769a90d9fa3 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Created linked-clone VM from snapshot [ 1692.685947] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f9cb08b-1ddc-48cd-9920-c7c900860730 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.695721] env[63379]: DEBUG oslo_vmware.api [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779811, 'name': ReconfigVM_Task, 'duration_secs': 0.214187} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1692.699612] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369424', 'volume_id': '136f5b73-3e46-4a97-a860-2727b3e8d24e', 'name': 'volume-136f5b73-3e46-4a97-a860-2727b3e8d24e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f', 'attached_at': '', 'detached_at': '', 'volume_id': '136f5b73-3e46-4a97-a860-2727b3e8d24e', 'serial': '136f5b73-3e46-4a97-a860-2727b3e8d24e'} {{(pid=63379) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1692.700088] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1692.700393] env[63379]: DEBUG nova.virt.vmwareapi.images [None req-26e01ae9-6daa-4b92-93d1-a769a90d9fa3 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Uploading image aaf4c495-f5da-4072-a169-e26fb5e7b2f6 {{(pid=63379) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1692.703147] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fe1f2b7-0bbd-4c44-97a7-5180b951c92c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.712150] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1692.712725] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-409265bc-4d6d-43e1-a6b5-78220765199d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.714721] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-26e01ae9-6daa-4b92-93d1-a769a90d9fa3 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Destroying the VM {{(pid=63379) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1692.714970] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-da367ad5-b266-4c88-8963-961b27a835f3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.724107] env[63379]: DEBUG oslo_vmware.api [None req-26e01ae9-6daa-4b92-93d1-a769a90d9fa3 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Waiting for the task: (returnval){ [ 1692.724107] env[63379]: value = "task-1779816" [ 
1692.724107] env[63379]: _type = "Task" [ 1692.724107] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1692.733402] env[63379]: DEBUG oslo_vmware.api [None req-26e01ae9-6daa-4b92-93d1-a769a90d9fa3 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779816, 'name': Destroy_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.800516] env[63379]: INFO nova.compute.manager [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Took 32.60 seconds to build instance. [ 1692.803373] env[63379]: DEBUG nova.scheduler.client.report [None req-6a4ae491-cf15-42f2-b882-7377288a8f45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1692.821859] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1692.822634] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1692.822634] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Deleting the datastore file [datastore1] fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1692.823392] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-38790da0-9d96-4783-a39e-eb2b770761ae {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.832876] env[63379]: DEBUG oslo_vmware.api [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1692.832876] env[63379]: value = "task-1779818" [ 1692.832876] env[63379]: _type = "Task" [ 1692.832876] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1692.843385] env[63379]: DEBUG oslo_vmware.api [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779818, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.016764] env[63379]: DEBUG oslo_vmware.api [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1779812, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.081181] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f26ec8e6-3cdb-46d0-8785-f6a5bf9e0650 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Releasing lock "refresh_cache-da66c3d9-ca03-4113-8703-64b666628936" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1693.082027] env[63379]: DEBUG nova.objects.instance [None req-f26ec8e6-3cdb-46d0-8785-f6a5bf9e0650 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Lazy-loading 'flavor' on Instance uuid da66c3d9-ca03-4113-8703-64b666628936 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1693.123158] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779815, 'name': CreateVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.243097] env[63379]: DEBUG oslo_vmware.api [None req-26e01ae9-6daa-4b92-93d1-a769a90d9fa3 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779816, 'name': Destroy_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.303145] env[63379]: DEBUG oslo_concurrency.lockutils [None req-eff9cab0-b9c3-4d18-ba73-55359c888605 tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "8b33e64a-ea19-4974-8c2d-350615b1e061" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.116s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1693.309632] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6a4ae491-cf15-42f2-b882-7377288a8f45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.951s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1693.312400] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3ef7cc05-7eb0-401b-8b5c-16594cf9cc06 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.392s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1693.312649] env[63379]: DEBUG nova.objects.instance [None req-3ef7cc05-7eb0-401b-8b5c-16594cf9cc06 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Lazy-loading 'resources' on Instance uuid 266cc3d5-c10d-4367-a879-d170802495db {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1693.340733] env[63379]: INFO nova.scheduler.client.report [None req-6a4ae491-cf15-42f2-b882-7377288a8f45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Deleted allocations for instance ec1f7a44-7344-43fb-9d51-688731d8ce14 [ 1693.346600] env[63379]: DEBUG oslo_vmware.api [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779818, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.514813] env[63379]: DEBUG oslo_vmware.api [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1779812, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.693402} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1693.515120] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] a7cce485-7476-4ea1-b127-68d879e164cd/a7cce485-7476-4ea1-b127-68d879e164cd.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1693.515337] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1693.515603] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bf260aed-929c-4f88-b029-1ac0b8d1d035 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.524039] env[63379]: DEBUG oslo_vmware.api [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1693.524039] env[63379]: value = "task-1779819" [ 1693.524039] env[63379]: _type = "Task" [ 1693.524039] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1693.531847] env[63379]: DEBUG oslo_vmware.api [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1779819, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.587661] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ffbc0fd-7c75-405c-8926-36ba8340838c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.611068] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f26ec8e6-3cdb-46d0-8785-f6a5bf9e0650 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1693.611429] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-62b12d70-b629-4b6f-b317-457b76d67fd7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.620955] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779815, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.622248] env[63379]: DEBUG oslo_vmware.api [None req-f26ec8e6-3cdb-46d0-8785-f6a5bf9e0650 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Waiting for the task: (returnval){ [ 1693.622248] env[63379]: value = "task-1779820" [ 1693.622248] env[63379]: _type = "Task" [ 1693.622248] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1693.630244] env[63379]: DEBUG oslo_vmware.api [None req-f26ec8e6-3cdb-46d0-8785-f6a5bf9e0650 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779820, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.736444] env[63379]: DEBUG oslo_vmware.api [None req-26e01ae9-6daa-4b92-93d1-a769a90d9fa3 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779816, 'name': Destroy_Task} progress is 33%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.854125] env[63379]: DEBUG oslo_vmware.api [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779818, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.710883} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1693.854791] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6a4ae491-cf15-42f2-b882-7377288a8f45 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Lock "ec1f7a44-7344-43fb-9d51-688731d8ce14" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.436s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1693.856122] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1693.856788] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1693.856788] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1693.857117] env[63379]: INFO nova.compute.manager [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Took 2.84 seconds to destroy the instance on the hypervisor. 
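The "Acquiring lock ... / Lock ... acquired ... waited N s / ... 'released' ... held N s" records above come from oslo.concurrency's lockutils (the lockutils.py paths in the log), which Nova uses to serialize work such as resource-tracker updates and per-instance "refresh_cache-<uuid>" network-info refreshes. The sketch below shows the two usual forms of that locking pattern, the synchronized decorator and the lock context manager, using the oslo_concurrency.lockutils API; the function bodies are placeholders for illustration, not Nova's ResourceTracker or Neutron cache code, and fetch_info is a hypothetical callable.

from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def update_usage(instance_uuid, vcpus, memory_mb):
    # Runs with the "compute_resources" lock held, so concurrent callers
    # serialize exactly as the "waited N s / held N s" timings above show.
    print('updating usage for %s: %d vCPUs, %d MB'
          % (instance_uuid, vcpus, memory_mb))


def refresh_network_cache(instance_uuid, fetch_info):
    # Explicit context-manager form, analogous to the per-instance
    # "refresh_cache-<uuid>" locks in the log; fetch_info stands in for the
    # Neutron lookup that repopulates the instance network-info cache.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        return fetch_info(instance_uuid)


update_usage('example-uuid', 1, 512)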
[ 1693.857500] env[63379]: DEBUG oslo.service.loopingcall [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1693.858080] env[63379]: DEBUG nova.compute.manager [-] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1693.858268] env[63379]: DEBUG nova.network.neutron [-] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1694.037817] env[63379]: DEBUG oslo_vmware.api [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1779819, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.199337} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1694.038104] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1694.038894] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d118286-fb3a-47a8-9799-04f4f8fe2bc8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.063776] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] a7cce485-7476-4ea1-b127-68d879e164cd/a7cce485-7476-4ea1-b127-68d879e164cd.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1694.066597] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4dd6f2fa-9ce9-4289-8907-576f48a2a806 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.087288] env[63379]: DEBUG oslo_vmware.api [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1694.087288] env[63379]: value = "task-1779821" [ 1694.087288] env[63379]: _type = "Task" [ 1694.087288] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.095649] env[63379]: DEBUG oslo_vmware.api [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1779821, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.131485] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779815, 'name': CreateVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.139728] env[63379]: DEBUG oslo_vmware.api [None req-f26ec8e6-3cdb-46d0-8785-f6a5bf9e0650 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779820, 'name': PowerOffVM_Task, 'duration_secs': 0.437929} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1694.139842] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f26ec8e6-3cdb-46d0-8785-f6a5bf9e0650 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1694.145330] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-f26ec8e6-3cdb-46d0-8785-f6a5bf9e0650 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Reconfiguring VM instance instance-00000049 to detach disk 2001 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1694.145669] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a9331b33-f660-4b89-b6fc-ca6c52f7b84c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.170844] env[63379]: DEBUG oslo_vmware.api [None req-f26ec8e6-3cdb-46d0-8785-f6a5bf9e0650 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Waiting for the task: (returnval){ [ 1694.170844] env[63379]: value = "task-1779822" [ 1694.170844] env[63379]: _type = "Task" [ 1694.170844] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.187430] env[63379]: DEBUG oslo_vmware.api [None req-f26ec8e6-3cdb-46d0-8785-f6a5bf9e0650 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779822, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.209629] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd347e5d-2142-4cbc-98ac-1b005f35030e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.219284] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a7192a1-493d-408e-b319-082edbc991b7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.225207] env[63379]: DEBUG nova.compute.manager [req-cbae2ea6-39ea-4297-a7d2-22e0c60303c0 req-19f9e8cc-3f66-4f47-bd78-cbd295f69991 service nova] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Received event network-vif-deleted-9ab93b8c-54a2-4b4a-aaa1-4c931e56286d {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1694.225404] env[63379]: INFO nova.compute.manager [req-cbae2ea6-39ea-4297-a7d2-22e0c60303c0 req-19f9e8cc-3f66-4f47-bd78-cbd295f69991 service nova] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Neutron deleted interface 9ab93b8c-54a2-4b4a-aaa1-4c931e56286d; detaching it from the instance and deleting it from the info cache [ 1694.225575] env[63379]: DEBUG nova.network.neutron [req-cbae2ea6-39ea-4297-a7d2-22e0c60303c0 req-19f9e8cc-3f66-4f47-bd78-cbd295f69991 service nova] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1694.257206] env[63379]: DEBUG nova.compute.manager [None req-2b323f14-bb24-43ec-9b3f-fb7f518ace8e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1694.262037] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c34a3dbe-5818-4d8b-8351-334243e85780 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.265124] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8e58e18-c886-4a1e-affc-e55a98738a35 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.268099] env[63379]: DEBUG oslo_vmware.api [None req-26e01ae9-6daa-4b92-93d1-a769a90d9fa3 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779816, 'name': Destroy_Task, 'duration_secs': 1.120836} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1694.268618] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-26e01ae9-6daa-4b92-93d1-a769a90d9fa3 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Destroyed the VM [ 1694.268869] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-26e01ae9-6daa-4b92-93d1-a769a90d9fa3 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Deleting Snapshot of the VM instance {{(pid=63379) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1694.269876] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-b8ac92e8-b9d7-45c6-a392-977dbc002752 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.278362] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94db6281-56b7-4bfd-b6b1-fd84e6c3edda {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.283920] env[63379]: DEBUG oslo_vmware.api [None req-26e01ae9-6daa-4b92-93d1-a769a90d9fa3 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Waiting for the task: (returnval){ [ 1694.283920] env[63379]: value = "task-1779823" [ 1694.283920] env[63379]: _type = "Task" [ 1694.283920] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.295829] env[63379]: DEBUG nova.compute.provider_tree [None req-3ef7cc05-7eb0-401b-8b5c-16594cf9cc06 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1694.301968] env[63379]: DEBUG oslo_vmware.api [None req-26e01ae9-6daa-4b92-93d1-a769a90d9fa3 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779823, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.601589] env[63379]: DEBUG oslo_vmware.api [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1779821, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.626565] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779815, 'name': CreateVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.681930] env[63379]: DEBUG oslo_vmware.api [None req-f26ec8e6-3cdb-46d0-8785-f6a5bf9e0650 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779822, 'name': ReconfigVM_Task, 'duration_secs': 0.311193} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1694.682261] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-f26ec8e6-3cdb-46d0-8785-f6a5bf9e0650 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Reconfigured VM instance instance-00000049 to detach disk 2001 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1694.682461] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f26ec8e6-3cdb-46d0-8785-f6a5bf9e0650 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1694.683665] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-771750a9-5a71-435f-9eba-96164501f1d9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.691718] env[63379]: DEBUG oslo_vmware.api [None req-f26ec8e6-3cdb-46d0-8785-f6a5bf9e0650 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Waiting for the task: (returnval){ [ 1694.691718] env[63379]: value = "task-1779824" [ 1694.691718] env[63379]: _type = "Task" [ 1694.691718] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.693675] env[63379]: DEBUG nova.network.neutron [-] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1694.700468] env[63379]: DEBUG oslo_vmware.api [None req-f26ec8e6-3cdb-46d0-8785-f6a5bf9e0650 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779824, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.729125] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b7cfa45c-ce55-453b-ad35-98a3a2a8275f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.740161] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adf1ab74-2c2e-4151-9d52-3daebffb2c9d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.779220] env[63379]: DEBUG nova.compute.manager [req-cbae2ea6-39ea-4297-a7d2-22e0c60303c0 req-19f9e8cc-3f66-4f47-bd78-cbd295f69991 service nova] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Detach interface failed, port_id=9ab93b8c-54a2-4b4a-aaa1-4c931e56286d, reason: Instance fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f could not be found. 
{{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 1694.784685] env[63379]: INFO nova.compute.manager [None req-2b323f14-bb24-43ec-9b3f-fb7f518ace8e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] instance snapshotting [ 1694.790310] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e34accec-3fd2-43d5-a3af-08bbbab04601 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.811776] env[63379]: DEBUG nova.scheduler.client.report [None req-3ef7cc05-7eb0-401b-8b5c-16594cf9cc06 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1694.819023] env[63379]: DEBUG oslo_vmware.api [None req-26e01ae9-6daa-4b92-93d1-a769a90d9fa3 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779823, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.819023] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02a1babc-6f1f-4164-9aea-30cbe88b4f54 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.895323] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b0b2e202-4de6-42f1-a838-cfaac474ea46 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquiring lock "0324da80-b97c-4dc9-9083-199fbda60341" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1694.895563] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b0b2e202-4de6-42f1-a838-cfaac474ea46 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Lock "0324da80-b97c-4dc9-9083-199fbda60341" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1694.895779] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b0b2e202-4de6-42f1-a838-cfaac474ea46 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquiring lock "0324da80-b97c-4dc9-9083-199fbda60341-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1694.895963] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b0b2e202-4de6-42f1-a838-cfaac474ea46 tempest-ServersAdminTestJSON-360986763 
tempest-ServersAdminTestJSON-360986763-project-member] Lock "0324da80-b97c-4dc9-9083-199fbda60341-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1694.896229] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b0b2e202-4de6-42f1-a838-cfaac474ea46 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Lock "0324da80-b97c-4dc9-9083-199fbda60341-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1694.898359] env[63379]: INFO nova.compute.manager [None req-b0b2e202-4de6-42f1-a838-cfaac474ea46 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Terminating instance [ 1694.900042] env[63379]: DEBUG nova.compute.manager [None req-b0b2e202-4de6-42f1-a838-cfaac474ea46 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1694.900270] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b0b2e202-4de6-42f1-a838-cfaac474ea46 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1694.901116] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ee4424d-a2a8-4297-9d95-685602ee1823 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.908948] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0b2e202-4de6-42f1-a838-cfaac474ea46 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1694.909206] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-61bb2d29-7cd2-4a0b-8252-fda5238ff392 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.914991] env[63379]: DEBUG oslo_vmware.api [None req-b0b2e202-4de6-42f1-a838-cfaac474ea46 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1694.914991] env[63379]: value = "task-1779825" [ 1694.914991] env[63379]: _type = "Task" [ 1694.914991] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.923111] env[63379]: DEBUG oslo_vmware.api [None req-b0b2e202-4de6-42f1-a838-cfaac474ea46 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779825, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.100500] env[63379]: DEBUG oslo_vmware.api [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1779821, 'name': ReconfigVM_Task, 'duration_secs': 0.529283} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1695.101068] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Reconfigured VM instance instance-0000004c to attach disk [datastore1] a7cce485-7476-4ea1-b127-68d879e164cd/a7cce485-7476-4ea1-b127-68d879e164cd.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1695.101728] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5b911ce7-970d-4605-867c-3f1dd2fe25ed {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.108539] env[63379]: DEBUG oslo_vmware.api [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1695.108539] env[63379]: value = "task-1779826" [ 1695.108539] env[63379]: _type = "Task" [ 1695.108539] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1695.117314] env[63379]: DEBUG oslo_vmware.api [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1779826, 'name': Rename_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.126273] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779815, 'name': CreateVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.196058] env[63379]: INFO nova.compute.manager [-] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Took 1.34 seconds to deallocate network for instance. [ 1695.202497] env[63379]: DEBUG oslo_vmware.api [None req-f26ec8e6-3cdb-46d0-8785-f6a5bf9e0650 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779824, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.296378] env[63379]: DEBUG oslo_vmware.api [None req-26e01ae9-6daa-4b92-93d1-a769a90d9fa3 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779823, 'name': RemoveSnapshot_Task, 'duration_secs': 0.945085} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1695.296584] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-26e01ae9-6daa-4b92-93d1-a769a90d9fa3 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Deleted Snapshot of the VM instance {{(pid=63379) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1695.320036] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3ef7cc05-7eb0-401b-8b5c-16594cf9cc06 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.005s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1695.320608] env[63379]: DEBUG oslo_concurrency.lockutils [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 9.139s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1695.320955] env[63379]: DEBUG nova.objects.instance [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63379) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1695.335526] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-2b323f14-bb24-43ec-9b3f-fb7f518ace8e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Creating Snapshot of the VM instance {{(pid=63379) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1695.335526] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-8fb24e9d-0832-465e-88de-b4f7a09fcc35 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.346017] env[63379]: INFO nova.scheduler.client.report [None req-3ef7cc05-7eb0-401b-8b5c-16594cf9cc06 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Deleted allocations for instance 266cc3d5-c10d-4367-a879-d170802495db [ 1695.347343] env[63379]: DEBUG oslo_vmware.api [None req-2b323f14-bb24-43ec-9b3f-fb7f518ace8e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1695.347343] env[63379]: value = "task-1779827" [ 1695.347343] env[63379]: _type = "Task" [ 1695.347343] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1695.360723] env[63379]: DEBUG oslo_vmware.api [None req-2b323f14-bb24-43ec-9b3f-fb7f518ace8e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779827, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.428928] env[63379]: DEBUG oslo_vmware.api [None req-b0b2e202-4de6-42f1-a838-cfaac474ea46 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779825, 'name': PowerOffVM_Task, 'duration_secs': 0.208794} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1695.428928] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0b2e202-4de6-42f1-a838-cfaac474ea46 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1695.428928] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b0b2e202-4de6-42f1-a838-cfaac474ea46 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1695.428928] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c06755d6-a680-4628-a3b8-e2e6f5b9801c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.625510] env[63379]: DEBUG oslo_vmware.api [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1779826, 'name': Rename_Task, 'duration_secs': 0.307091} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1695.631346] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1695.631753] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-268c13e9-6b7e-45d6-adfe-b08e779f2e52 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.642363] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779815, 'name': CreateVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.644158] env[63379]: DEBUG oslo_vmware.api [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1695.644158] env[63379]: value = "task-1779829" [ 1695.644158] env[63379]: _type = "Task" [ 1695.644158] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1695.656694] env[63379]: DEBUG oslo_vmware.api [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1779829, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.700954] env[63379]: DEBUG oslo_vmware.api [None req-f26ec8e6-3cdb-46d0-8785-f6a5bf9e0650 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779824, 'name': PowerOnVM_Task, 'duration_secs': 0.563929} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1695.701358] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f26ec8e6-3cdb-46d0-8785-f6a5bf9e0650 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1695.701609] env[63379]: DEBUG nova.compute.manager [None req-f26ec8e6-3cdb-46d0-8785-f6a5bf9e0650 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1695.702413] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0468dfd2-21f5-4698-a10b-511379529bf4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.764833] env[63379]: INFO nova.compute.manager [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Took 0.57 seconds to detach 1 volumes for instance. [ 1695.802233] env[63379]: WARNING nova.compute.manager [None req-26e01ae9-6daa-4b92-93d1-a769a90d9fa3 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Image not found during snapshot: nova.exception.ImageNotFound: Image aaf4c495-f5da-4072-a169-e26fb5e7b2f6 could not be found. [ 1695.861798] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3ef7cc05-7eb0-401b-8b5c-16594cf9cc06 tempest-ServerMetadataTestJSON-1497600104 tempest-ServerMetadataTestJSON-1497600104-project-member] Lock "266cc3d5-c10d-4367-a879-d170802495db" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.926s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1695.867625] env[63379]: DEBUG oslo_vmware.api [None req-2b323f14-bb24-43ec-9b3f-fb7f518ace8e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779827, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.060715] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f94df2a8-50be-4351-a89a-ea51614c1d18 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Acquiring lock "7687aaa1-d1a0-4d0d-a6b4-47c454fe3655" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1696.060715] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f94df2a8-50be-4351-a89a-ea51614c1d18 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Lock "7687aaa1-d1a0-4d0d-a6b4-47c454fe3655" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1696.060715] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f94df2a8-50be-4351-a89a-ea51614c1d18 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Acquiring lock "7687aaa1-d1a0-4d0d-a6b4-47c454fe3655-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1696.060715] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f94df2a8-50be-4351-a89a-ea51614c1d18 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Lock "7687aaa1-d1a0-4d0d-a6b4-47c454fe3655-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1696.060715] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f94df2a8-50be-4351-a89a-ea51614c1d18 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Lock "7687aaa1-d1a0-4d0d-a6b4-47c454fe3655-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1696.062982] env[63379]: INFO nova.compute.manager [None req-f94df2a8-50be-4351-a89a-ea51614c1d18 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Terminating instance [ 1696.069018] env[63379]: DEBUG nova.compute.manager [None req-f94df2a8-50be-4351-a89a-ea51614c1d18 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1696.069018] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f94df2a8-50be-4351-a89a-ea51614c1d18 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1696.069018] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3c4b8a8-5507-4e76-bbc8-0deb4496fed0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.076461] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f94df2a8-50be-4351-a89a-ea51614c1d18 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1696.077312] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-511de83b-7b97-4dec-8183-2eacd3fa1c68 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.084726] env[63379]: DEBUG oslo_vmware.api [None req-f94df2a8-50be-4351-a89a-ea51614c1d18 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Waiting for the task: (returnval){ [ 1696.084726] env[63379]: value = "task-1779830" [ 1696.084726] env[63379]: _type = "Task" [ 1696.084726] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1696.095520] env[63379]: DEBUG oslo_vmware.api [None req-f94df2a8-50be-4351-a89a-ea51614c1d18 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779830, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.135869] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779815, 'name': CreateVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.156180] env[63379]: DEBUG oslo_vmware.api [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1779829, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.275893] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1696.293032] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b0b2e202-4de6-42f1-a838-cfaac474ea46 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1696.293032] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b0b2e202-4de6-42f1-a838-cfaac474ea46 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1696.295272] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0b2e202-4de6-42f1-a838-cfaac474ea46 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Deleting the datastore file [datastore1] 0324da80-b97c-4dc9-9083-199fbda60341 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1696.295272] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-138b631b-9773-4265-b9ea-fb2b5b49177f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.302996] env[63379]: DEBUG oslo_vmware.api [None req-b0b2e202-4de6-42f1-a838-cfaac474ea46 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1696.302996] env[63379]: value = "task-1779831" [ 1696.302996] env[63379]: _type = "Task" [ 1696.302996] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1696.315436] env[63379]: DEBUG oslo_vmware.api [None req-b0b2e202-4de6-42f1-a838-cfaac474ea46 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779831, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.330494] env[63379]: DEBUG oslo_concurrency.lockutils [None req-506a1a86-eb51-4aa2-9540-50035b4d65de tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.010s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1696.332209] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.151s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1696.334428] env[63379]: INFO nova.compute.claims [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1696.367035] env[63379]: DEBUG oslo_vmware.api [None req-2b323f14-bb24-43ec-9b3f-fb7f518ace8e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779827, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.595501] env[63379]: DEBUG oslo_vmware.api [None req-f94df2a8-50be-4351-a89a-ea51614c1d18 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779830, 'name': PowerOffVM_Task, 'duration_secs': 0.332135} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1696.596282] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f94df2a8-50be-4351-a89a-ea51614c1d18 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1696.596547] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f94df2a8-50be-4351-a89a-ea51614c1d18 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1696.596858] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f5676987-cfc0-4bbf-8f78-64c79f29e23d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.626486] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fda00fb3-011c-47ad-9289-3a601b14889e tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Acquiring lock "da66c3d9-ca03-4113-8703-64b666628936" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1696.626871] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fda00fb3-011c-47ad-9289-3a601b14889e tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Lock "da66c3d9-ca03-4113-8703-64b666628936" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1696.627124] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fda00fb3-011c-47ad-9289-3a601b14889e tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Acquiring lock "da66c3d9-ca03-4113-8703-64b666628936-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1696.627381] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fda00fb3-011c-47ad-9289-3a601b14889e tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Lock "da66c3d9-ca03-4113-8703-64b666628936-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1696.627573] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fda00fb3-011c-47ad-9289-3a601b14889e tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Lock "da66c3d9-ca03-4113-8703-64b666628936-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1696.640714] env[63379]: INFO nova.compute.manager [None req-fda00fb3-011c-47ad-9289-3a601b14889e tempest-ServerRescueTestJSON-871022900 
tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Terminating instance [ 1696.643813] env[63379]: DEBUG nova.compute.manager [None req-fda00fb3-011c-47ad-9289-3a601b14889e tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1696.644101] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-fda00fb3-011c-47ad-9289-3a601b14889e tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1696.645285] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26f047a5-f79e-4f1c-882f-1140e495ea89 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.652077] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779815, 'name': CreateVM_Task, 'duration_secs': 3.67486} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1696.655929] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1696.656724] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1696.657033] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1696.657910] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1696.660532] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-992e9143-2f50-4fbe-9457-df5e2b8d234e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.662389] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-fda00fb3-011c-47ad-9289-3a601b14889e tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1696.665670] env[63379]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-69aa3584-d1b3-4a46-b64c-443594ed091f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.667256] env[63379]: DEBUG oslo_vmware.api [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1779829, 'name': PowerOnVM_Task, 'duration_secs': 0.886349} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1696.667808] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1696.668074] env[63379]: INFO nova.compute.manager [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Took 9.75 seconds to spawn the instance on the hypervisor. [ 1696.668385] env[63379]: DEBUG nova.compute.manager [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1696.669362] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aa7e9b2-3935-4409-ac6f-ac0c6962213b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.674210] env[63379]: DEBUG oslo_vmware.api [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Waiting for the task: (returnval){ [ 1696.674210] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]520ae58a-c385-55f2-487f-423b9a4d4547" [ 1696.674210] env[63379]: _type = "Task" [ 1696.674210] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1696.675735] env[63379]: DEBUG oslo_vmware.api [None req-fda00fb3-011c-47ad-9289-3a601b14889e tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Waiting for the task: (returnval){ [ 1696.675735] env[63379]: value = "task-1779833" [ 1696.675735] env[63379]: _type = "Task" [ 1696.675735] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1696.692131] env[63379]: DEBUG oslo_vmware.api [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]520ae58a-c385-55f2-487f-423b9a4d4547, 'name': SearchDatastore_Task, 'duration_secs': 0.012134} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1696.696417] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1696.696679] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1696.696923] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1696.697092] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1696.697520] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1696.697612] env[63379]: DEBUG oslo_vmware.api [None req-fda00fb3-011c-47ad-9289-3a601b14889e tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779833, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.697828] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3eac61a2-a189-4a89-86bd-90a084c695e5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.706493] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1696.706755] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1696.707492] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7fdf924b-22a4-4133-b519-4d9ed9503140 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.713340] env[63379]: DEBUG oslo_vmware.api [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Waiting for the task: (returnval){ [ 1696.713340] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b17476-e12d-2c1e-8120-11ce36e704ed" [ 1696.713340] env[63379]: _type = "Task" [ 1696.713340] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1696.725032] env[63379]: DEBUG oslo_vmware.api [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b17476-e12d-2c1e-8120-11ce36e704ed, 'name': SearchDatastore_Task, 'duration_secs': 0.009329} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1696.727377] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-decea1d1-7681-4fda-8ddc-f91d3cf72dd8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.733346] env[63379]: DEBUG oslo_vmware.api [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Waiting for the task: (returnval){ [ 1696.733346] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52aa3ae8-dbbb-89df-b6df-ff74065b7733" [ 1696.733346] env[63379]: _type = "Task" [ 1696.733346] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1696.738547] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f94df2a8-50be-4351-a89a-ea51614c1d18 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1696.738765] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f94df2a8-50be-4351-a89a-ea51614c1d18 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1696.739050] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-f94df2a8-50be-4351-a89a-ea51614c1d18 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Deleting the datastore file [datastore1] 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1696.739674] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e76c38d1-fb20-46ac-a8f2-6cae39760e6f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.744070] env[63379]: DEBUG oslo_vmware.api [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52aa3ae8-dbbb-89df-b6df-ff74065b7733, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.748697] env[63379]: DEBUG oslo_vmware.api [None req-f94df2a8-50be-4351-a89a-ea51614c1d18 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Waiting for the task: (returnval){ [ 1696.748697] env[63379]: value = "task-1779834" [ 1696.748697] env[63379]: _type = "Task" [ 1696.748697] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1696.756468] env[63379]: DEBUG oslo_vmware.api [None req-f94df2a8-50be-4351-a89a-ea51614c1d18 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779834, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.813443] env[63379]: DEBUG oslo_vmware.api [None req-b0b2e202-4de6-42f1-a838-cfaac474ea46 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779831, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.415409} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1696.813735] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0b2e202-4de6-42f1-a838-cfaac474ea46 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1696.813926] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b0b2e202-4de6-42f1-a838-cfaac474ea46 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1696.814463] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b0b2e202-4de6-42f1-a838-cfaac474ea46 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1696.814737] env[63379]: INFO nova.compute.manager [None req-b0b2e202-4de6-42f1-a838-cfaac474ea46 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Took 1.91 seconds to destroy the instance on the hypervisor. [ 1696.815336] env[63379]: DEBUG oslo.service.loopingcall [None req-b0b2e202-4de6-42f1-a838-cfaac474ea46 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1696.815573] env[63379]: DEBUG nova.compute.manager [-] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1696.815673] env[63379]: DEBUG nova.network.neutron [-] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1696.863973] env[63379]: DEBUG oslo_vmware.api [None req-2b323f14-bb24-43ec-9b3f-fb7f518ace8e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779827, 'name': CreateSnapshot_Task, 'duration_secs': 1.178067} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1696.864208] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-2b323f14-bb24-43ec-9b3f-fb7f518ace8e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Created Snapshot of the VM instance {{(pid=63379) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1696.865076] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c6def90-e551-4db8-b8a7-331d74334d4e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.200376] env[63379]: DEBUG oslo_vmware.api [None req-fda00fb3-011c-47ad-9289-3a601b14889e tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779833, 'name': PowerOffVM_Task, 'duration_secs': 0.189403} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1697.200705] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-fda00fb3-011c-47ad-9289-3a601b14889e tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1697.200910] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-fda00fb3-011c-47ad-9289-3a601b14889e tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1697.201193] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-86f71fa1-8c9a-45ad-8dfb-9424eb8fbabb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.206937] env[63379]: INFO nova.compute.manager [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Took 29.67 seconds to build instance. 
[ 1697.215365] env[63379]: DEBUG nova.compute.manager [req-877eb199-4231-4f9a-a7fe-92c251d9278d req-2a8f5859-d6ec-4ee3-8705-c469226620a4 service nova] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Received event network-vif-deleted-269637bb-41c4-433e-aaab-1c67c39977b1 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1697.215850] env[63379]: INFO nova.compute.manager [req-877eb199-4231-4f9a-a7fe-92c251d9278d req-2a8f5859-d6ec-4ee3-8705-c469226620a4 service nova] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Neutron deleted interface 269637bb-41c4-433e-aaab-1c67c39977b1; detaching it from the instance and deleting it from the info cache [ 1697.215930] env[63379]: DEBUG nova.network.neutron [req-877eb199-4231-4f9a-a7fe-92c251d9278d req-2a8f5859-d6ec-4ee3-8705-c469226620a4 service nova] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1697.249206] env[63379]: DEBUG oslo_vmware.api [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52aa3ae8-dbbb-89df-b6df-ff74065b7733, 'name': SearchDatastore_Task, 'duration_secs': 0.011511} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1697.249856] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1697.250150] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 861cda26-f938-4b2e-ba3d-56b8469b6034/861cda26-f938-4b2e-ba3d-56b8469b6034.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1697.253382] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-edfa2690-9a00-47d4-92cb-e62ef81490c0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.265114] env[63379]: DEBUG oslo_vmware.api [None req-f94df2a8-50be-4351-a89a-ea51614c1d18 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779834, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.189362} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1697.265114] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-f94df2a8-50be-4351-a89a-ea51614c1d18 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1697.265114] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f94df2a8-50be-4351-a89a-ea51614c1d18 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1697.265114] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f94df2a8-50be-4351-a89a-ea51614c1d18 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1697.265114] env[63379]: INFO nova.compute.manager [None req-f94df2a8-50be-4351-a89a-ea51614c1d18 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1697.266043] env[63379]: DEBUG oslo.service.loopingcall [None req-f94df2a8-50be-4351-a89a-ea51614c1d18 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1697.266471] env[63379]: DEBUG oslo_vmware.api [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Waiting for the task: (returnval){ [ 1697.266471] env[63379]: value = "task-1779836" [ 1697.266471] env[63379]: _type = "Task" [ 1697.266471] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1697.267166] env[63379]: DEBUG nova.compute.manager [-] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1697.267414] env[63379]: DEBUG nova.network.neutron [-] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1697.280054] env[63379]: DEBUG oslo_vmware.api [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Task: {'id': task-1779836, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1697.353899] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-fda00fb3-011c-47ad-9289-3a601b14889e tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1697.354362] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-fda00fb3-011c-47ad-9289-3a601b14889e tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1697.354690] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-fda00fb3-011c-47ad-9289-3a601b14889e tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Deleting the datastore file [datastore1] da66c3d9-ca03-4113-8703-64b666628936 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1697.355631] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f1a8735f-a3c7-4a08-9cc0-4193a7bc35c8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.365629] env[63379]: DEBUG oslo_vmware.api [None req-fda00fb3-011c-47ad-9289-3a601b14889e tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Waiting for the task: (returnval){ [ 1697.365629] env[63379]: value = "task-1779837" [ 1697.365629] env[63379]: _type = "Task" [ 1697.365629] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1697.374806] env[63379]: DEBUG oslo_vmware.api [None req-fda00fb3-011c-47ad-9289-3a601b14889e tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779837, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1697.382966] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-2b323f14-bb24-43ec-9b3f-fb7f518ace8e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Creating linked-clone VM from snapshot {{(pid=63379) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1697.383305] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-2fd4792a-27cd-413f-b9a9-6a6b64da1b24 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.395866] env[63379]: DEBUG oslo_vmware.api [None req-2b323f14-bb24-43ec-9b3f-fb7f518ace8e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1697.395866] env[63379]: value = "task-1779838" [ 1697.395866] env[63379]: _type = "Task" [ 1697.395866] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1697.411143] env[63379]: DEBUG oslo_vmware.api [None req-2b323f14-bb24-43ec-9b3f-fb7f518ace8e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779838, 'name': CloneVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1697.663275] env[63379]: DEBUG nova.network.neutron [-] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1697.709357] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c7807320-4eb6-47ae-8927-214dbef5d2e9 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "a7cce485-7476-4ea1-b127-68d879e164cd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.204s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1697.719183] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d9fbe9de-d8b1-4622-9420-3ab2ac1e033b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.731200] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff348075-6e6d-4504-ab9d-544c8e92b256 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.775154] env[63379]: DEBUG nova.compute.manager [req-877eb199-4231-4f9a-a7fe-92c251d9278d req-2a8f5859-d6ec-4ee3-8705-c469226620a4 service nova] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Detach interface failed, port_id=269637bb-41c4-433e-aaab-1c67c39977b1, reason: Instance 0324da80-b97c-4dc9-9083-199fbda60341 could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 1697.790130] env[63379]: DEBUG oslo_vmware.api [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Task: {'id': task-1779836, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1697.798846] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2afad96-3b41-44f6-9a3f-43f21f2c3b9c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.806326] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2674a453-dbec-4353-9d26-f48e4c4476a6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.844453] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b21c719c-460c-4a4b-a6fd-faec349d449e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.850875] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67d69cab-2875-4c99-8741-8d2f9e25743c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.871309] env[63379]: DEBUG nova.compute.provider_tree [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1697.886930] env[63379]: DEBUG oslo_vmware.api [None req-fda00fb3-011c-47ad-9289-3a601b14889e tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779837, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.496174} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1697.887234] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-fda00fb3-011c-47ad-9289-3a601b14889e tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1697.887440] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-fda00fb3-011c-47ad-9289-3a601b14889e tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1697.887626] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-fda00fb3-011c-47ad-9289-3a601b14889e tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1697.887810] env[63379]: INFO nova.compute.manager [None req-fda00fb3-011c-47ad-9289-3a601b14889e tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: da66c3d9-ca03-4113-8703-64b666628936] Took 1.24 seconds to destroy the instance on the hypervisor. [ 1697.888112] env[63379]: DEBUG oslo.service.loopingcall [None req-fda00fb3-011c-47ad-9289-3a601b14889e tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1697.888502] env[63379]: DEBUG nova.compute.manager [-] [instance: da66c3d9-ca03-4113-8703-64b666628936] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1697.888502] env[63379]: DEBUG nova.network.neutron [-] [instance: da66c3d9-ca03-4113-8703-64b666628936] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1697.914092] env[63379]: DEBUG oslo_vmware.api [None req-2b323f14-bb24-43ec-9b3f-fb7f518ace8e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779838, 'name': CloneVM_Task} progress is 94%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.169810] env[63379]: INFO nova.compute.manager [-] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Took 1.35 seconds to deallocate network for instance. [ 1698.288748] env[63379]: DEBUG oslo_vmware.api [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Task: {'id': task-1779836, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.59562} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1698.288993] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 861cda26-f938-4b2e-ba3d-56b8469b6034/861cda26-f938-4b2e-ba3d-56b8469b6034.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1698.289229] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1698.289488] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0ea69c44-6837-49d1-ae99-57c4c8714250 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.296912] env[63379]: DEBUG oslo_vmware.api [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Waiting for the task: (returnval){ [ 1698.296912] env[63379]: value = "task-1779839" [ 1698.296912] env[63379]: _type = "Task" [ 1698.296912] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1698.306228] env[63379]: DEBUG oslo_vmware.api [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Task: {'id': task-1779839, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.398685] env[63379]: ERROR nova.scheduler.client.report [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [req-04b74d3d-3676-4853-8a1e-3fa1a5234fc6] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID cf478c89-515f-4372-b90f-4868ab56e978. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-04b74d3d-3676-4853-8a1e-3fa1a5234fc6"}]} [ 1698.407966] env[63379]: DEBUG nova.network.neutron [-] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1698.418042] env[63379]: DEBUG oslo_vmware.api [None req-2b323f14-bb24-43ec-9b3f-fb7f518ace8e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779838, 'name': CloneVM_Task} progress is 94%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.425976] env[63379]: DEBUG nova.scheduler.client.report [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Refreshing inventories for resource provider cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1698.453038] env[63379]: DEBUG nova.scheduler.client.report [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Updating ProviderTree inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1698.453348] env[63379]: DEBUG nova.compute.provider_tree [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1698.471613] env[63379]: DEBUG nova.scheduler.client.report [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Refreshing aggregate associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, aggregates: None {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1698.504410] env[63379]: DEBUG nova.scheduler.client.report [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Refreshing trait associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, traits: 
HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1698.570071] env[63379]: DEBUG nova.compute.manager [req-fad9732d-2e03-4fa1-a4ed-44dd743dd34a req-823d24ff-d81a-48ea-8f3c-e267101e74d1 service nova] [instance: da66c3d9-ca03-4113-8703-64b666628936] Received event network-vif-deleted-7a48c3ef-9850-43b6-b138-d7cbb329face {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1698.570307] env[63379]: INFO nova.compute.manager [req-fad9732d-2e03-4fa1-a4ed-44dd743dd34a req-823d24ff-d81a-48ea-8f3c-e267101e74d1 service nova] [instance: da66c3d9-ca03-4113-8703-64b666628936] Neutron deleted interface 7a48c3ef-9850-43b6-b138-d7cbb329face; detaching it from the instance and deleting it from the info cache [ 1698.570483] env[63379]: DEBUG nova.network.neutron [req-fad9732d-2e03-4fa1-a4ed-44dd743dd34a req-823d24ff-d81a-48ea-8f3c-e267101e74d1 service nova] [instance: da66c3d9-ca03-4113-8703-64b666628936] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1698.675325] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b0b2e202-4de6-42f1-a838-cfaac474ea46 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1698.813592] env[63379]: DEBUG oslo_vmware.api [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Task: {'id': task-1779839, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060757} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1698.813873] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1698.815506] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a077c490-daa8-4164-a92d-6a4035afd962 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.845788] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] 861cda26-f938-4b2e-ba3d-56b8469b6034/861cda26-f938-4b2e-ba3d-56b8469b6034.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1698.849831] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8a94c35c-7361-45e1-8060-e97e52bd18a8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.871562] env[63379]: DEBUG oslo_vmware.api [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Waiting for the task: (returnval){ [ 1698.871562] env[63379]: value = "task-1779840" [ 1698.871562] env[63379]: _type = "Task" [ 1698.871562] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1698.884933] env[63379]: DEBUG oslo_vmware.api [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Task: {'id': task-1779840, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.890538] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d4ae961-d1cf-418b-a3c2-95438b9ecb81 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.898174] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea53f3e7-8431-45dc-8941-271ee4c48d48 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.909406] env[63379]: DEBUG oslo_vmware.api [None req-2b323f14-bb24-43ec-9b3f-fb7f518ace8e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779838, 'name': CloneVM_Task} progress is 94%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.934869] env[63379]: INFO nova.compute.manager [-] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Took 1.67 seconds to deallocate network for instance. 
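The ERROR → refresh sequence above (req-04b74d3d-3676-4853-8a1e-3fa1a5234fc6) is the Placement API's optimistic-concurrency handshake: an inventory PUT carries the resource provider generation it was computed against, a stale generation comes back as 409 with code placement.concurrent_update, and the client re-reads the provider's inventories, aggregates and traits before trying again, which is exactly what the subsequent "Refreshing ..." lines show. Below is a minimal sketch of that retry loop; the endpoint URL, token and microversion header are assumptions for illustration, not values taken from this log, and the function is not Nova's report-client code.

```python
import requests

PLACEMENT = "http://placement.example:8778"              # assumed endpoint, not from this log
HEADERS = {"X-Auth-Token": "ADMIN_TOKEN",                # assumed credentials
           "OpenStack-API-Version": "placement 1.26"}    # assumed microversion

def put_inventory(rp_uuid, inventories, retries=3):
    """PUT an inventory dict, retrying when the provider generation is stale."""
    url = f"{PLACEMENT}/resource_providers/{rp_uuid}/inventories"
    for _ in range(retries):
        # Re-read the provider so the PUT carries the current generation.
        current = requests.get(url, headers=HEADERS)
        current.raise_for_status()
        body = {
            "resource_provider_generation": current.json()["resource_provider_generation"],
            "inventories": inventories,
        }
        resp = requests.put(url, json=body, headers=HEADERS)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # 409 placement.concurrent_update: another writer bumped the generation;
        # loop, refresh, and retry.
    raise RuntimeError(f"generation conflict persisted for provider {rp_uuid}")
```

In the log the conflict resolves on the first refresh: the tree adopts the refreshed data for cf478c89-515f-4372-b90f-4868ab56e978, the inventory is then reported as unchanged, and no further update is sent.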
[ 1698.937421] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a8ec20d-2c8d-4053-842a-5f4085a1d453 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.949221] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2d96996-219e-43cd-9b73-567d00535c0f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.964992] env[63379]: DEBUG nova.compute.provider_tree [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1699.025930] env[63379]: DEBUG nova.network.neutron [-] [instance: da66c3d9-ca03-4113-8703-64b666628936] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1699.074053] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cb76858e-8f98-4a28-b6af-ea04b53ad069 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.084294] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b52054f-a2bd-43ab-bcae-7ebebfa7d658 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.116249] env[63379]: DEBUG nova.compute.manager [req-fad9732d-2e03-4fa1-a4ed-44dd743dd34a req-823d24ff-d81a-48ea-8f3c-e267101e74d1 service nova] [instance: da66c3d9-ca03-4113-8703-64b666628936] Detach interface failed, port_id=7a48c3ef-9850-43b6-b138-d7cbb329face, reason: Instance da66c3d9-ca03-4113-8703-64b666628936 could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 1699.254254] env[63379]: DEBUG nova.compute.manager [req-ac9f652e-09ad-4462-897f-03d23673a993 req-33ec0174-e016-4b6d-bd0d-1b20a4b11a4e service nova] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Received event network-vif-deleted-9b6c5265-06a3-4c47-b5cf-a656af402fa9 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1699.254538] env[63379]: DEBUG nova.compute.manager [req-ac9f652e-09ad-4462-897f-03d23673a993 req-33ec0174-e016-4b6d-bd0d-1b20a4b11a4e service nova] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Received event network-changed-bf9adade-286a-4e50-a0a5-a80cd17209c6 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1699.254658] env[63379]: DEBUG nova.compute.manager [req-ac9f652e-09ad-4462-897f-03d23673a993 req-33ec0174-e016-4b6d-bd0d-1b20a4b11a4e service nova] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Refreshing instance network info cache due to event network-changed-bf9adade-286a-4e50-a0a5-a80cd17209c6. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1699.254883] env[63379]: DEBUG oslo_concurrency.lockutils [req-ac9f652e-09ad-4462-897f-03d23673a993 req-33ec0174-e016-4b6d-bd0d-1b20a4b11a4e service nova] Acquiring lock "refresh_cache-a7cce485-7476-4ea1-b127-68d879e164cd" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1699.255119] env[63379]: DEBUG oslo_concurrency.lockutils [req-ac9f652e-09ad-4462-897f-03d23673a993 req-33ec0174-e016-4b6d-bd0d-1b20a4b11a4e service nova] Acquired lock "refresh_cache-a7cce485-7476-4ea1-b127-68d879e164cd" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1699.255357] env[63379]: DEBUG nova.network.neutron [req-ac9f652e-09ad-4462-897f-03d23673a993 req-33ec0174-e016-4b6d-bd0d-1b20a4b11a4e service nova] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Refreshing network info cache for port bf9adade-286a-4e50-a0a5-a80cd17209c6 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1699.390838] env[63379]: DEBUG oslo_vmware.api [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Task: {'id': task-1779840, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1699.412485] env[63379]: DEBUG oslo_vmware.api [None req-2b323f14-bb24-43ec-9b3f-fb7f518ace8e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779838, 'name': CloneVM_Task} progress is 95%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1699.444901] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f94df2a8-50be-4351-a89a-ea51614c1d18 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1699.471124] env[63379]: DEBUG nova.scheduler.client.report [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1699.528558] env[63379]: INFO nova.compute.manager [-] [instance: da66c3d9-ca03-4113-8703-64b666628936] Took 1.64 seconds to deallocate network for instance. [ 1699.887531] env[63379]: DEBUG oslo_vmware.api [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Task: {'id': task-1779840, 'name': ReconfigVM_Task, 'duration_secs': 0.757711} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1699.887531] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Reconfigured VM instance instance-0000004d to attach disk [datastore1] 861cda26-f938-4b2e-ba3d-56b8469b6034/861cda26-f938-4b2e-ba3d-56b8469b6034.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1699.887531] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aae64a39-c1ea-4e23-bf8f-26950f23a41c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.893074] env[63379]: DEBUG oslo_vmware.api [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Waiting for the task: (returnval){ [ 1699.893074] env[63379]: value = "task-1779841" [ 1699.893074] env[63379]: _type = "Task" [ 1699.893074] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1699.903488] env[63379]: DEBUG oslo_vmware.api [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Task: {'id': task-1779841, 'name': Rename_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1699.917510] env[63379]: DEBUG oslo_vmware.api [None req-2b323f14-bb24-43ec-9b3f-fb7f518ace8e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779838, 'name': CloneVM_Task, 'duration_secs': 2.274991} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1699.918301] env[63379]: INFO nova.virt.vmwareapi.vmops [None req-2b323f14-bb24-43ec-9b3f-fb7f518ace8e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Created linked-clone VM from snapshot [ 1699.919468] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f6e21e5-8d38-4305-b4e2-d08c3bdd8b23 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.930111] env[63379]: DEBUG nova.virt.vmwareapi.images [None req-2b323f14-bb24-43ec-9b3f-fb7f518ace8e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Uploading image a0c852ff-95e4-4fa8-a275-b52fef4c5e38 {{(pid=63379) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1699.946356] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b323f14-bb24-43ec-9b3f-fb7f518ace8e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Destroying the VM {{(pid=63379) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1699.946711] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-60edea04-2523-43b4-8e6d-48a363026962 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.961403] env[63379]: DEBUG oslo_vmware.api [None req-2b323f14-bb24-43ec-9b3f-fb7f518ace8e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1699.961403] env[63379]: value = "task-1779842" [ 1699.961403] env[63379]: _type = "Task" [ 1699.961403] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1699.976125] env[63379]: DEBUG oslo_vmware.api [None req-2b323f14-bb24-43ec-9b3f-fb7f518ace8e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779842, 'name': Destroy_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1699.976329] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.644s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1699.976822] env[63379]: DEBUG nova.compute.manager [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1699.980168] env[63379]: DEBUG oslo_concurrency.lockutils [None req-df8295bb-d2ff-49ec-b548-1f0e8c489bce tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.307s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1699.980636] env[63379]: DEBUG nova.objects.instance [None req-df8295bb-d2ff-49ec-b548-1f0e8c489bce tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Lazy-loading 'resources' on Instance uuid 41952d7b-ce23-4e9b-8843-bbac1d3099c1 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1700.035690] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fda00fb3-011c-47ad-9289-3a601b14889e tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1700.056564] env[63379]: DEBUG nova.network.neutron [req-ac9f652e-09ad-4462-897f-03d23673a993 req-33ec0174-e016-4b6d-bd0d-1b20a4b11a4e service nova] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Updated VIF entry in instance network info cache for port bf9adade-286a-4e50-a0a5-a80cd17209c6. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1700.056971] env[63379]: DEBUG nova.network.neutron [req-ac9f652e-09ad-4462-897f-03d23673a993 req-33ec0174-e016-4b6d-bd0d-1b20a4b11a4e service nova] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Updating instance_info_cache with network_info: [{"id": "bf9adade-286a-4e50-a0a5-a80cd17209c6", "address": "fa:16:3e:f3:83:85", "network": {"id": "c67e6fb1-ba3e-4494-b459-ecd555f3bf64", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1864563188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c01c5c8c3734c4ea066324e542e7374", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6934071-bf85-4591-9c7d-55c7ea131262", "external-id": "nsx-vlan-transportzone-452", "segmentation_id": 452, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf9adade-28", "ovs_interfaceid": "bf9adade-286a-4e50-a0a5-a80cd17209c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1700.405138] env[63379]: DEBUG oslo_vmware.api [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Task: {'id': task-1779841, 'name': Rename_Task, 'duration_secs': 0.191217} completed 
successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1700.405448] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1700.405706] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-661a16eb-b3fd-48d4-a00b-a19a6c51bf45 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.413202] env[63379]: DEBUG oslo_vmware.api [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Waiting for the task: (returnval){ [ 1700.413202] env[63379]: value = "task-1779843" [ 1700.413202] env[63379]: _type = "Task" [ 1700.413202] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1700.422373] env[63379]: DEBUG oslo_vmware.api [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Task: {'id': task-1779843, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1700.473815] env[63379]: DEBUG oslo_vmware.api [None req-2b323f14-bb24-43ec-9b3f-fb7f518ace8e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779842, 'name': Destroy_Task} progress is 33%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1700.487744] env[63379]: DEBUG nova.compute.utils [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1700.488839] env[63379]: DEBUG nova.compute.manager [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1700.488958] env[63379]: DEBUG nova.network.neutron [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1700.562654] env[63379]: DEBUG oslo_concurrency.lockutils [req-ac9f652e-09ad-4462-897f-03d23673a993 req-33ec0174-e016-4b6d-bd0d-1b20a4b11a4e service nova] Releasing lock "refresh_cache-a7cce485-7476-4ea1-b127-68d879e164cd" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1700.567600] env[63379]: DEBUG nova.policy [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1fd786092d394d1a9b444051664ac7ae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0f28f4532d464e6eb90ab75799990c85', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1700.817104] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e438c9e5-90ed-4366-8a09-e810bd537895 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.826506] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-774a4072-c785-4cec-a68b-f09b7e8dec9a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.863596] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ccae5df-9641-4551-9b48-8c301dbc438f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.871459] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8abf1879-4cb1-4257-8590-a8124f1b80b4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.889365] env[63379]: DEBUG nova.compute.provider_tree [None req-df8295bb-d2ff-49ec-b548-1f0e8c489bce tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1700.925424] env[63379]: DEBUG oslo_vmware.api [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Task: {'id': task-1779843, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1700.974109] env[63379]: DEBUG oslo_vmware.api [None req-2b323f14-bb24-43ec-9b3f-fb7f518ace8e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779842, 'name': Destroy_Task, 'duration_secs': 0.931052} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1700.975093] env[63379]: DEBUG nova.network.neutron [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Successfully created port: 9d36760a-dfa7-4ce6-b2db-a72018c2a272 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1700.977809] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-2b323f14-bb24-43ec-9b3f-fb7f518ace8e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Destroyed the VM [ 1700.978104] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-2b323f14-bb24-43ec-9b3f-fb7f518ace8e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Deleting Snapshot of the VM instance {{(pid=63379) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1700.978437] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-c3bcd3fd-0a80-47de-bd3d-32632dcc8c3b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.986026] env[63379]: DEBUG oslo_vmware.api [None req-2b323f14-bb24-43ec-9b3f-fb7f518ace8e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1700.986026] env[63379]: value = "task-1779844" [ 1700.986026] env[63379]: _type = "Task" [ 1700.986026] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1700.992735] env[63379]: DEBUG nova.compute.manager [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1701.001670] env[63379]: DEBUG oslo_vmware.api [None req-2b323f14-bb24-43ec-9b3f-fb7f518ace8e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779844, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1701.396860] env[63379]: DEBUG nova.scheduler.client.report [None req-df8295bb-d2ff-49ec-b548-1f0e8c489bce tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1701.431454] env[63379]: DEBUG oslo_vmware.api [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Task: {'id': task-1779843, 'name': PowerOnVM_Task} progress is 93%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1701.501515] env[63379]: DEBUG oslo_vmware.api [None req-2b323f14-bb24-43ec-9b3f-fb7f518ace8e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779844, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1701.903280] env[63379]: DEBUG oslo_concurrency.lockutils [None req-df8295bb-d2ff-49ec-b548-1f0e8c489bce tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.923s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1701.906128] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3b066b37-9cb0-4672-b2a9-366ca5e9c273 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.713s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1701.906128] env[63379]: DEBUG nova.objects.instance [None req-3b066b37-9cb0-4672-b2a9-366ca5e9c273 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lazy-loading 'resources' on Instance uuid c1858f41-75e7-4eee-a6db-493e150622ef {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1701.925177] env[63379]: DEBUG oslo_vmware.api [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Task: {'id': task-1779843, 'name': PowerOnVM_Task} progress is 93%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1701.931269] env[63379]: INFO nova.scheduler.client.report [None req-df8295bb-d2ff-49ec-b548-1f0e8c489bce tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Deleted allocations for instance 41952d7b-ce23-4e9b-8843-bbac1d3099c1 [ 1701.997475] env[63379]: DEBUG oslo_vmware.api [None req-2b323f14-bb24-43ec-9b3f-fb7f518ace8e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779844, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.003117] env[63379]: DEBUG nova.compute.manager [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1702.030425] env[63379]: DEBUG nova.virt.hardware [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1702.030718] env[63379]: DEBUG nova.virt.hardware [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1702.030893] env[63379]: DEBUG nova.virt.hardware [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1702.031103] env[63379]: DEBUG nova.virt.hardware [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1702.031449] env[63379]: DEBUG nova.virt.hardware [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1702.031449] env[63379]: DEBUG nova.virt.hardware [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 
tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1702.031616] env[63379]: DEBUG nova.virt.hardware [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1702.031780] env[63379]: DEBUG nova.virt.hardware [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1702.031974] env[63379]: DEBUG nova.virt.hardware [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1702.032127] env[63379]: DEBUG nova.virt.hardware [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1702.032305] env[63379]: DEBUG nova.virt.hardware [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1702.033193] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b79d87d0-993a-49e9-a06d-f023e8bbb464 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.041989] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6619999-6df4-449f-bd6c-9e04bd718ec0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.434697] env[63379]: DEBUG nova.compute.manager [req-775969db-9311-46d6-b656-d7ec9421675d req-7ee98192-8ba4-4686-80ab-a40035e8a885 service nova] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Received event network-vif-plugged-9d36760a-dfa7-4ce6-b2db-a72018c2a272 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1702.434922] env[63379]: DEBUG oslo_concurrency.lockutils [req-775969db-9311-46d6-b656-d7ec9421675d req-7ee98192-8ba4-4686-80ab-a40035e8a885 service nova] Acquiring lock "fad7a2dd-291f-4105-95a6-56bdbcc7acb4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1702.435324] env[63379]: DEBUG oslo_concurrency.lockutils [req-775969db-9311-46d6-b656-d7ec9421675d req-7ee98192-8ba4-4686-80ab-a40035e8a885 service nova] Lock 
"fad7a2dd-291f-4105-95a6-56bdbcc7acb4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1702.435324] env[63379]: DEBUG oslo_concurrency.lockutils [req-775969db-9311-46d6-b656-d7ec9421675d req-7ee98192-8ba4-4686-80ab-a40035e8a885 service nova] Lock "fad7a2dd-291f-4105-95a6-56bdbcc7acb4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1702.435489] env[63379]: DEBUG nova.compute.manager [req-775969db-9311-46d6-b656-d7ec9421675d req-7ee98192-8ba4-4686-80ab-a40035e8a885 service nova] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] No waiting events found dispatching network-vif-plugged-9d36760a-dfa7-4ce6-b2db-a72018c2a272 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1702.435656] env[63379]: WARNING nova.compute.manager [req-775969db-9311-46d6-b656-d7ec9421675d req-7ee98192-8ba4-4686-80ab-a40035e8a885 service nova] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Received unexpected event network-vif-plugged-9d36760a-dfa7-4ce6-b2db-a72018c2a272 for instance with vm_state building and task_state spawning. [ 1702.436348] env[63379]: DEBUG oslo_vmware.api [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Task: {'id': task-1779843, 'name': PowerOnVM_Task, 'duration_secs': 1.851156} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1702.439160] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1702.439387] env[63379]: INFO nova.compute.manager [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Took 13.04 seconds to spawn the instance on the hypervisor. 
[ 1702.439573] env[63379]: DEBUG nova.compute.manager [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1702.442671] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bef8454b-160d-4224-aac5-6f89d7e56b2f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.445474] env[63379]: DEBUG oslo_concurrency.lockutils [None req-df8295bb-d2ff-49ec-b548-1f0e8c489bce tempest-VolumesAdminNegativeTest-1899328954 tempest-VolumesAdminNegativeTest-1899328954-project-member] Lock "41952d7b-ce23-4e9b-8843-bbac1d3099c1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.078s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1702.502962] env[63379]: DEBUG oslo_vmware.api [None req-2b323f14-bb24-43ec-9b3f-fb7f518ace8e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779844, 'name': RemoveSnapshot_Task, 'duration_secs': 1.300687} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1702.503253] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-2b323f14-bb24-43ec-9b3f-fb7f518ace8e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Deleted Snapshot of the VM instance {{(pid=63379) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1702.533059] env[63379]: DEBUG nova.network.neutron [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Successfully updated port: 9d36760a-dfa7-4ce6-b2db-a72018c2a272 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1702.696700] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35f7edd7-80ee-43d2-a7de-730829329e91 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.704869] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73951674-0863-49e0-b4fc-e815a9c82d98 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.739307] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de93c3b2-88b0-4fe7-8e5c-989df2565d4e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.744985] env[63379]: DEBUG oslo_concurrency.lockutils [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquiring lock "19941838-d6b0-4fb8-9d06-f4a1b80ba428" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1702.745168] env[63379]: DEBUG 
oslo_concurrency.lockutils [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "19941838-d6b0-4fb8-9d06-f4a1b80ba428" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1702.751388] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40080192-f526-415f-a03c-606e0c132182 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.765341] env[63379]: DEBUG nova.compute.provider_tree [None req-3b066b37-9cb0-4672-b2a9-366ca5e9c273 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1702.960044] env[63379]: INFO nova.compute.manager [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Took 34.70 seconds to build instance. [ 1703.011922] env[63379]: WARNING nova.compute.manager [None req-2b323f14-bb24-43ec-9b3f-fb7f518ace8e tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Image not found during snapshot: nova.exception.ImageNotFound: Image a0c852ff-95e4-4fa8-a275-b52fef4c5e38 could not be found. [ 1703.036874] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "refresh_cache-fad7a2dd-291f-4105-95a6-56bdbcc7acb4" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1703.036874] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquired lock "refresh_cache-fad7a2dd-291f-4105-95a6-56bdbcc7acb4" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1703.036874] env[63379]: DEBUG nova.network.neutron [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1703.248380] env[63379]: DEBUG nova.compute.manager [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Starting instance... 
{{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1703.268949] env[63379]: DEBUG nova.scheduler.client.report [None req-3b066b37-9cb0-4672-b2a9-366ca5e9c273 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1703.463310] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3cccda93-c9bb-46c3-a75e-a7382f9abe95 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Lock "861cda26-f938-4b2e-ba3d-56b8469b6034" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.211s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1703.495087] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4e878ee6-0622-4688-8677-f1eeb6b12cbb tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquiring lock "8b33e64a-ea19-4974-8c2d-350615b1e061" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1703.495371] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4e878ee6-0622-4688-8677-f1eeb6b12cbb tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "8b33e64a-ea19-4974-8c2d-350615b1e061" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1703.495594] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4e878ee6-0622-4688-8677-f1eeb6b12cbb tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquiring lock "8b33e64a-ea19-4974-8c2d-350615b1e061-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1703.495788] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4e878ee6-0622-4688-8677-f1eeb6b12cbb tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "8b33e64a-ea19-4974-8c2d-350615b1e061-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1703.496267] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4e878ee6-0622-4688-8677-f1eeb6b12cbb tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "8b33e64a-ea19-4974-8c2d-350615b1e061-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1703.498468] env[63379]: INFO nova.compute.manager [None req-4e878ee6-0622-4688-8677-f1eeb6b12cbb tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Terminating instance [ 1703.501980] env[63379]: DEBUG nova.compute.manager [None req-4e878ee6-0622-4688-8677-f1eeb6b12cbb tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1703.502237] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4e878ee6-0622-4688-8677-f1eeb6b12cbb tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1703.503102] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a3b668d-4df1-4129-84f9-f904117547e9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.510830] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e878ee6-0622-4688-8677-f1eeb6b12cbb tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1703.511068] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6a02995e-7287-4687-aa73-90a903d367e5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.517872] env[63379]: DEBUG oslo_vmware.api [None req-4e878ee6-0622-4688-8677-f1eeb6b12cbb tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1703.517872] env[63379]: value = "task-1779845" [ 1703.517872] env[63379]: _type = "Task" [ 1703.517872] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1703.525640] env[63379]: DEBUG oslo_vmware.api [None req-4e878ee6-0622-4688-8677-f1eeb6b12cbb tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779845, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.578389] env[63379]: DEBUG nova.network.neutron [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Instance cache missing network info. 
{{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1703.778802] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3b066b37-9cb0-4672-b2a9-366ca5e9c273 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.869s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1703.782439] env[63379]: DEBUG oslo_concurrency.lockutils [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1703.782740] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.508s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1703.785016] env[63379]: DEBUG nova.objects.instance [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lazy-loading 'resources' on Instance uuid fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1703.788096] env[63379]: DEBUG nova.network.neutron [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Updating instance_info_cache with network_info: [{"id": "9d36760a-dfa7-4ce6-b2db-a72018c2a272", "address": "fa:16:3e:d4:3c:5d", "network": {"id": "a2c9b802-041e-4679-bfb1-118fd9cd10f3", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-986609966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f28f4532d464e6eb90ab75799990c85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d36760a-df", "ovs_interfaceid": "9d36760a-dfa7-4ce6-b2db-a72018c2a272", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1703.799444] env[63379]: INFO nova.scheduler.client.report [None req-3b066b37-9cb0-4672-b2a9-366ca5e9c273 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Deleted allocations for instance 
c1858f41-75e7-4eee-a6db-493e150622ef [ 1704.028968] env[63379]: DEBUG oslo_vmware.api [None req-4e878ee6-0622-4688-8677-f1eeb6b12cbb tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779845, 'name': PowerOffVM_Task, 'duration_secs': 0.264698} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1704.029444] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e878ee6-0622-4688-8677-f1eeb6b12cbb tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1704.029627] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4e878ee6-0622-4688-8677-f1eeb6b12cbb tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1704.030225] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ff1793e0-fba6-4aef-9788-0622589ba486 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.106471] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4e878ee6-0622-4688-8677-f1eeb6b12cbb tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1704.106737] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4e878ee6-0622-4688-8677-f1eeb6b12cbb tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1704.107210] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e878ee6-0622-4688-8677-f1eeb6b12cbb tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Deleting the datastore file [datastore1] 8b33e64a-ea19-4974-8c2d-350615b1e061 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1704.107210] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3955e4ce-a1a1-44d9-b1db-819b21139ad7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.114988] env[63379]: DEBUG oslo_vmware.api [None req-4e878ee6-0622-4688-8677-f1eeb6b12cbb tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for the task: (returnval){ [ 1704.114988] env[63379]: value = "task-1779847" [ 1704.114988] env[63379]: _type = "Task" [ 1704.114988] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1704.123966] env[63379]: DEBUG oslo_vmware.api [None req-4e878ee6-0622-4688-8677-f1eeb6b12cbb tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779847, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.293619] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Releasing lock "refresh_cache-fad7a2dd-291f-4105-95a6-56bdbcc7acb4" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1704.293948] env[63379]: DEBUG nova.compute.manager [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Instance network_info: |[{"id": "9d36760a-dfa7-4ce6-b2db-a72018c2a272", "address": "fa:16:3e:d4:3c:5d", "network": {"id": "a2c9b802-041e-4679-bfb1-118fd9cd10f3", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-986609966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f28f4532d464e6eb90ab75799990c85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d36760a-df", "ovs_interfaceid": "9d36760a-dfa7-4ce6-b2db-a72018c2a272", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1704.294705] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d4:3c:5d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8f441782-e89c-4815-b53e-af83c5d27902', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9d36760a-dfa7-4ce6-b2db-a72018c2a272', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1704.302995] env[63379]: DEBUG oslo.service.loopingcall [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1704.303270] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1704.303527] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7c9001c4-c2a6-4049-bb31-b18920c61918 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.323689] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3b066b37-9cb0-4672-b2a9-366ca5e9c273 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "c1858f41-75e7-4eee-a6db-493e150622ef" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.706s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1704.328486] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1704.328486] env[63379]: value = "task-1779848" [ 1704.328486] env[63379]: _type = "Task" [ 1704.328486] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1704.337892] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779848, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.399689] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1bc93f90-c5f2-4977-b271-06ac84232882 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Acquiring lock "861cda26-f938-4b2e-ba3d-56b8469b6034" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1704.400016] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1bc93f90-c5f2-4977-b271-06ac84232882 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Lock "861cda26-f938-4b2e-ba3d-56b8469b6034" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1704.400281] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1bc93f90-c5f2-4977-b271-06ac84232882 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Acquiring lock "861cda26-f938-4b2e-ba3d-56b8469b6034-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1704.400512] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1bc93f90-c5f2-4977-b271-06ac84232882 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Lock "861cda26-f938-4b2e-ba3d-56b8469b6034-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1704.400697] env[63379]: DEBUG oslo_concurrency.lockutils 
[None req-1bc93f90-c5f2-4977-b271-06ac84232882 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Lock "861cda26-f938-4b2e-ba3d-56b8469b6034-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1704.403132] env[63379]: INFO nova.compute.manager [None req-1bc93f90-c5f2-4977-b271-06ac84232882 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Terminating instance [ 1704.406222] env[63379]: DEBUG nova.compute.manager [None req-1bc93f90-c5f2-4977-b271-06ac84232882 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1704.406416] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-1bc93f90-c5f2-4977-b271-06ac84232882 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1704.408492] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e19f05b6-db08-4791-8e69-8c613efab78a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.418856] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-1bc93f90-c5f2-4977-b271-06ac84232882 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1704.419142] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6ff846b9-a6bc-435c-b24e-075f1709630c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.428180] env[63379]: DEBUG oslo_vmware.api [None req-1bc93f90-c5f2-4977-b271-06ac84232882 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Waiting for the task: (returnval){ [ 1704.428180] env[63379]: value = "task-1779849" [ 1704.428180] env[63379]: _type = "Task" [ 1704.428180] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1704.441014] env[63379]: DEBUG oslo_vmware.api [None req-1bc93f90-c5f2-4977-b271-06ac84232882 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Task: {'id': task-1779849, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.458923] env[63379]: DEBUG nova.compute.manager [req-23b71a90-47f5-4732-b5e3-c16cc212ec09 req-3ed5cddb-a5a6-48eb-b526-9ae55eab38ac service nova] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Received event network-changed-9d36760a-dfa7-4ce6-b2db-a72018c2a272 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1704.459152] env[63379]: DEBUG nova.compute.manager [req-23b71a90-47f5-4732-b5e3-c16cc212ec09 req-3ed5cddb-a5a6-48eb-b526-9ae55eab38ac service nova] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Refreshing instance network info cache due to event network-changed-9d36760a-dfa7-4ce6-b2db-a72018c2a272. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1704.459370] env[63379]: DEBUG oslo_concurrency.lockutils [req-23b71a90-47f5-4732-b5e3-c16cc212ec09 req-3ed5cddb-a5a6-48eb-b526-9ae55eab38ac service nova] Acquiring lock "refresh_cache-fad7a2dd-291f-4105-95a6-56bdbcc7acb4" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1704.459516] env[63379]: DEBUG oslo_concurrency.lockutils [req-23b71a90-47f5-4732-b5e3-c16cc212ec09 req-3ed5cddb-a5a6-48eb-b526-9ae55eab38ac service nova] Acquired lock "refresh_cache-fad7a2dd-291f-4105-95a6-56bdbcc7acb4" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1704.459679] env[63379]: DEBUG nova.network.neutron [req-23b71a90-47f5-4732-b5e3-c16cc212ec09 req-3ed5cddb-a5a6-48eb-b526-9ae55eab38ac service nova] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Refreshing network info cache for port 9d36760a-dfa7-4ce6-b2db-a72018c2a272 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1704.615547] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d11dc98d-f13e-4aff-bfa6-f03f883d3835 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.630033] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2977420b-8b85-4f7a-8bcb-9926ea523936 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.632943] env[63379]: DEBUG oslo_vmware.api [None req-4e878ee6-0622-4688-8677-f1eeb6b12cbb tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Task: {'id': task-1779847, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.133707} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1704.633229] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e878ee6-0622-4688-8677-f1eeb6b12cbb tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1704.633418] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4e878ee6-0622-4688-8677-f1eeb6b12cbb tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1704.633593] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4e878ee6-0622-4688-8677-f1eeb6b12cbb tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1704.633799] env[63379]: INFO nova.compute.manager [None req-4e878ee6-0622-4688-8677-f1eeb6b12cbb tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1704.634057] env[63379]: DEBUG oslo.service.loopingcall [None req-4e878ee6-0622-4688-8677-f1eeb6b12cbb tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1704.634783] env[63379]: DEBUG nova.compute.manager [-] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1704.634783] env[63379]: DEBUG nova.network.neutron [-] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1704.664114] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b122ac88-f654-4863-9da4-d6780a6bd1fb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.672934] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45b31496-8bae-479b-a899-c4e8f80833b5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.690024] env[63379]: DEBUG nova.compute.provider_tree [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1704.838928] 
env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779848, 'name': CreateVM_Task, 'duration_secs': 0.456048} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1704.839146] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1704.840220] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1704.840220] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1704.840751] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1704.840991] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50b30442-5722-4203-bcef-293c70cd0594 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.845817] env[63379]: DEBUG oslo_vmware.api [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1704.845817] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5252da0c-6988-5d9b-df6a-6864e55e41d2" [ 1704.845817] env[63379]: _type = "Task" [ 1704.845817] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1704.853993] env[63379]: DEBUG oslo_vmware.api [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5252da0c-6988-5d9b-df6a-6864e55e41d2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.942796] env[63379]: DEBUG oslo_vmware.api [None req-1bc93f90-c5f2-4977-b271-06ac84232882 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Task: {'id': task-1779849, 'name': PowerOffVM_Task, 'duration_secs': 0.289219} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1704.943102] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-1bc93f90-c5f2-4977-b271-06ac84232882 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1704.943410] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-1bc93f90-c5f2-4977-b271-06ac84232882 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1704.943797] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-547b9949-6dd1-4b59-b0fd-94e70729438b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.080052] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-1bc93f90-c5f2-4977-b271-06ac84232882 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1705.080695] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-1bc93f90-c5f2-4977-b271-06ac84232882 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1705.080695] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-1bc93f90-c5f2-4977-b271-06ac84232882 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Deleting the datastore file [datastore1] 861cda26-f938-4b2e-ba3d-56b8469b6034 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1705.080920] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2c9a0fb4-56be-492d-9dc8-6f38f76d0958 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.088566] env[63379]: DEBUG oslo_vmware.api [None req-1bc93f90-c5f2-4977-b271-06ac84232882 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Waiting for the task: (returnval){ [ 1705.088566] env[63379]: value = "task-1779851" [ 1705.088566] env[63379]: _type = "Task" [ 1705.088566] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1705.095923] env[63379]: DEBUG oslo_vmware.api [None req-1bc93f90-c5f2-4977-b271-06ac84232882 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Task: {'id': task-1779851, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.212355] env[63379]: ERROR nova.scheduler.client.report [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [req-5f6f3940-8cf4-4d70-b1d1-9b2fcfd2e8d2] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID cf478c89-515f-4372-b90f-4868ab56e978. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-5f6f3940-8cf4-4d70-b1d1-9b2fcfd2e8d2"}]} [ 1705.230018] env[63379]: DEBUG nova.scheduler.client.report [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Refreshing inventories for resource provider cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1705.231861] env[63379]: DEBUG nova.network.neutron [req-23b71a90-47f5-4732-b5e3-c16cc212ec09 req-3ed5cddb-a5a6-48eb-b526-9ae55eab38ac service nova] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Updated VIF entry in instance network info cache for port 9d36760a-dfa7-4ce6-b2db-a72018c2a272. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1705.232228] env[63379]: DEBUG nova.network.neutron [req-23b71a90-47f5-4732-b5e3-c16cc212ec09 req-3ed5cddb-a5a6-48eb-b526-9ae55eab38ac service nova] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Updating instance_info_cache with network_info: [{"id": "9d36760a-dfa7-4ce6-b2db-a72018c2a272", "address": "fa:16:3e:d4:3c:5d", "network": {"id": "a2c9b802-041e-4679-bfb1-118fd9cd10f3", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-986609966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f28f4532d464e6eb90ab75799990c85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d36760a-df", "ovs_interfaceid": "9d36760a-dfa7-4ce6-b2db-a72018c2a272", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1705.249567] env[63379]: DEBUG nova.scheduler.client.report [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 
tempest-DeleteServersTestJSON-2143897756-project-member] Updating ProviderTree inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1705.249806] env[63379]: DEBUG nova.compute.provider_tree [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1705.262143] env[63379]: DEBUG nova.scheduler.client.report [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Refreshing aggregate associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, aggregates: None {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1705.280518] env[63379]: DEBUG nova.scheduler.client.report [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Refreshing trait associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1705.355589] env[63379]: DEBUG oslo_vmware.api [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5252da0c-6988-5d9b-df6a-6864e55e41d2, 'name': SearchDatastore_Task, 'duration_secs': 0.012545} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1705.356169] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1705.356515] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1705.356810] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1705.357010] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1705.357347] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1705.361392] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-68615dcc-f907-4d8a-adae-dfcc1936b830 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.368051] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1705.368255] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1705.368974] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8bd2d347-0033-40ae-97cd-2077ba15a7d5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.374107] env[63379]: DEBUG oslo_vmware.api [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1705.374107] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52093882-e0c6-f453-485f-7efdf5fb38d6" [ 1705.374107] env[63379]: _type = "Task" [ 1705.374107] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1705.384252] env[63379]: DEBUG oslo_vmware.api [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52093882-e0c6-f453-485f-7efdf5fb38d6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.407382] env[63379]: DEBUG nova.network.neutron [-] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1705.581425] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0914a7a2-8f04-4508-a5d6-d50e4591e2e4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.595605] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-170a4300-8e1e-4d79-9ec8-8e84dff7e7e6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.603820] env[63379]: DEBUG oslo_vmware.api [None req-1bc93f90-c5f2-4977-b271-06ac84232882 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Task: {'id': task-1779851, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.39538} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1705.629816] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-1bc93f90-c5f2-4977-b271-06ac84232882 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1705.630020] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-1bc93f90-c5f2-4977-b271-06ac84232882 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1705.630219] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-1bc93f90-c5f2-4977-b271-06ac84232882 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1705.630407] env[63379]: INFO nova.compute.manager [None req-1bc93f90-c5f2-4977-b271-06ac84232882 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Took 1.22 seconds to destroy the instance on the hypervisor. [ 1705.630675] env[63379]: DEBUG oslo.service.loopingcall [None req-1bc93f90-c5f2-4977-b271-06ac84232882 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1705.631074] env[63379]: DEBUG nova.compute.manager [-] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1705.631177] env[63379]: DEBUG nova.network.neutron [-] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1705.633328] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4215fb9d-d10a-4e12-baf6-51cef4ed8741 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.641381] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c88f164c-8b2e-4f0b-aa32-0e543bbae0b2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.659224] env[63379]: DEBUG nova.compute.provider_tree [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1705.738351] env[63379]: DEBUG oslo_concurrency.lockutils [req-23b71a90-47f5-4732-b5e3-c16cc212ec09 req-3ed5cddb-a5a6-48eb-b526-9ae55eab38ac service nova] Releasing lock "refresh_cache-fad7a2dd-291f-4105-95a6-56bdbcc7acb4" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1705.887254] env[63379]: DEBUG oslo_vmware.api [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52093882-e0c6-f453-485f-7efdf5fb38d6, 'name': SearchDatastore_Task, 'duration_secs': 0.009404} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1705.887862] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93a9540b-495a-4eb8-8db4-7d50588358a7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.893329] env[63379]: DEBUG oslo_vmware.api [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1705.893329] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]523b0954-86dc-7799-f732-7b7c20c63859" [ 1705.893329] env[63379]: _type = "Task" [ 1705.893329] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1705.901116] env[63379]: DEBUG oslo_vmware.api [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]523b0954-86dc-7799-f732-7b7c20c63859, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.910890] env[63379]: INFO nova.compute.manager [-] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Took 1.28 seconds to deallocate network for instance. [ 1706.195161] env[63379]: DEBUG nova.scheduler.client.report [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Updated inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 with generation 97 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1706.195578] env[63379]: DEBUG nova.compute.provider_tree [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Updating resource provider cf478c89-515f-4372-b90f-4868ab56e978 generation from 97 to 98 during operation: update_inventory {{(pid=63379) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1706.195729] env[63379]: DEBUG nova.compute.provider_tree [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1706.334362] env[63379]: DEBUG nova.network.neutron [-] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1706.404489] env[63379]: DEBUG oslo_vmware.api [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]523b0954-86dc-7799-f732-7b7c20c63859, 'name': SearchDatastore_Task, 'duration_secs': 0.008844} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1706.404764] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1706.405118] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] fad7a2dd-291f-4105-95a6-56bdbcc7acb4/fad7a2dd-291f-4105-95a6-56bdbcc7acb4.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1706.405394] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2207617a-4e1d-4c2c-a0a8-803a9d37013e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.411971] env[63379]: DEBUG oslo_vmware.api [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1706.411971] env[63379]: value = "task-1779852" [ 1706.411971] env[63379]: _type = "Task" [ 1706.411971] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1706.416684] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4e878ee6-0622-4688-8677-f1eeb6b12cbb tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1706.421420] env[63379]: DEBUG oslo_vmware.api [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779852, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1706.486965] env[63379]: DEBUG nova.compute.manager [req-799cfb01-ea1f-4960-936b-8aed7d8d61fe req-a1e37ee0-46d1-4548-a09a-3767bb6e4f96 service nova] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Received event network-vif-deleted-13196237-c6ec-4167-b9f2-5818ee2ad126 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1706.487225] env[63379]: DEBUG nova.compute.manager [req-799cfb01-ea1f-4960-936b-8aed7d8d61fe req-a1e37ee0-46d1-4548-a09a-3767bb6e4f96 service nova] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Received event network-vif-deleted-d304bb93-6f61-492c-9e8c-ce1b0ac9131e {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1706.701843] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.919s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1706.705247] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b0b2e202-4de6-42f1-a838-cfaac474ea46 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.030s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1706.705714] env[63379]: DEBUG nova.objects.instance [None req-b0b2e202-4de6-42f1-a838-cfaac474ea46 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Lazy-loading 'resources' on Instance uuid 0324da80-b97c-4dc9-9083-199fbda60341 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1706.728768] env[63379]: INFO nova.scheduler.client.report [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Deleted allocations for instance fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f [ 1706.837170] env[63379]: INFO nova.compute.manager [-] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Took 1.21 seconds to deallocate network for instance. [ 1706.922350] env[63379]: DEBUG oslo_vmware.api [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779852, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.450312} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1706.922592] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] fad7a2dd-291f-4105-95a6-56bdbcc7acb4/fad7a2dd-291f-4105-95a6-56bdbcc7acb4.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1706.922802] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1706.923070] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7688f744-038f-44ac-8e62-51c8d0d9d954 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.929159] env[63379]: DEBUG oslo_vmware.api [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1706.929159] env[63379]: value = "task-1779853" [ 1706.929159] env[63379]: _type = "Task" [ 1706.929159] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1706.937165] env[63379]: DEBUG oslo_vmware.api [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779853, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.236664] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2e20212b-683c-4c0d-b846-b94715ab7443 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.227s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1707.345075] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1bc93f90-c5f2-4977-b271-06ac84232882 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1707.442983] env[63379]: DEBUG oslo_vmware.api [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779853, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062048} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1707.443365] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1707.444657] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea393dd7-a0ed-4627-adf8-2508af7bced6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.467444] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] fad7a2dd-291f-4105-95a6-56bdbcc7acb4/fad7a2dd-291f-4105-95a6-56bdbcc7acb4.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1707.468525] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ac11513f-cd97-4b22-81e6-67903ea79422 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.483304] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84bcb0ba-31c9-4d91-b851-a2bf42473e46 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.492344] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9c278f6-790b-47e6-b68c-def93405164c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.495556] env[63379]: DEBUG oslo_vmware.api [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1707.495556] env[63379]: value = "task-1779854" [ 1707.495556] env[63379]: _type = "Task" [ 1707.495556] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1707.525258] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b713c494-0ddb-488f-9a0c-0fa65109c5c2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.531858] env[63379]: DEBUG oslo_vmware.api [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779854, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.537299] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc2b995e-8541-499d-81c3-ada157160604 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.551734] env[63379]: DEBUG nova.compute.provider_tree [None req-b0b2e202-4de6-42f1-a838-cfaac474ea46 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1708.006616] env[63379]: DEBUG oslo_vmware.api [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779854, 'name': ReconfigVM_Task, 'duration_secs': 0.289833} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1708.006616] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Reconfigured VM instance instance-0000004e to attach disk [datastore1] fad7a2dd-291f-4105-95a6-56bdbcc7acb4/fad7a2dd-291f-4105-95a6-56bdbcc7acb4.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1708.011055] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-217a18cb-f82a-4ea2-9206-26fdfdb01b38 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.014494] env[63379]: DEBUG oslo_vmware.api [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1708.014494] env[63379]: value = "task-1779855" [ 1708.014494] env[63379]: _type = "Task" [ 1708.014494] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1708.023639] env[63379]: DEBUG oslo_vmware.api [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779855, 'name': Rename_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.057187] env[63379]: DEBUG nova.scheduler.client.report [None req-b0b2e202-4de6-42f1-a838-cfaac474ea46 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1708.525489] env[63379]: DEBUG oslo_vmware.api [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779855, 'name': Rename_Task, 'duration_secs': 0.142908} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1708.525780] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1708.526048] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f4e89fa1-680a-46bd-a17e-d8d025b0eed9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.534257] env[63379]: DEBUG oslo_vmware.api [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1708.534257] env[63379]: value = "task-1779856" [ 1708.534257] env[63379]: _type = "Task" [ 1708.534257] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1708.543940] env[63379]: DEBUG oslo_vmware.api [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779856, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.561362] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b0b2e202-4de6-42f1-a838-cfaac474ea46 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.856s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1708.563833] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f94df2a8-50be-4351-a89a-ea51614c1d18 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.119s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1708.564375] env[63379]: DEBUG nova.objects.instance [None req-f94df2a8-50be-4351-a89a-ea51614c1d18 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Lazy-loading 'resources' on Instance uuid 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1708.589973] env[63379]: INFO nova.scheduler.client.report [None req-b0b2e202-4de6-42f1-a838-cfaac474ea46 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Deleted allocations for instance 0324da80-b97c-4dc9-9083-199fbda60341 [ 1709.045117] env[63379]: DEBUG oslo_vmware.api [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779856, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.099972] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b0b2e202-4de6-42f1-a838-cfaac474ea46 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Lock "0324da80-b97c-4dc9-9083-199fbda60341" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.204s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1709.222388] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "7edacb20-8472-4e9d-9408-31947d9f284e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1709.222388] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "7edacb20-8472-4e9d-9408-31947d9f284e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1709.287097] env[63379]: DEBUG oslo_concurrency.lockutils [None req-db6f6327-ec19-47ef-bba8-47c450490ec9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquiring lock "758ade2c-7f75-4907-95d5-681d5792ae31" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1709.287408] env[63379]: DEBUG oslo_concurrency.lockutils [None req-db6f6327-ec19-47ef-bba8-47c450490ec9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Lock "758ade2c-7f75-4907-95d5-681d5792ae31" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1709.287686] env[63379]: DEBUG oslo_concurrency.lockutils [None req-db6f6327-ec19-47ef-bba8-47c450490ec9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquiring lock "758ade2c-7f75-4907-95d5-681d5792ae31-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1709.287891] env[63379]: DEBUG oslo_concurrency.lockutils [None req-db6f6327-ec19-47ef-bba8-47c450490ec9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Lock "758ade2c-7f75-4907-95d5-681d5792ae31-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1709.288136] env[63379]: DEBUG oslo_concurrency.lockutils [None req-db6f6327-ec19-47ef-bba8-47c450490ec9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Lock 
"758ade2c-7f75-4907-95d5-681d5792ae31-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1709.290222] env[63379]: INFO nova.compute.manager [None req-db6f6327-ec19-47ef-bba8-47c450490ec9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Terminating instance [ 1709.293202] env[63379]: DEBUG nova.compute.manager [None req-db6f6327-ec19-47ef-bba8-47c450490ec9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1709.293514] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-db6f6327-ec19-47ef-bba8-47c450490ec9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1709.294507] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c8b1efd-3f79-48ea-8817-6db9949bdc12 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.305926] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-db6f6327-ec19-47ef-bba8-47c450490ec9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1709.306210] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2a872ab2-bc7a-4e5b-aca5-8f8051624496 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.313796] env[63379]: DEBUG oslo_vmware.api [None req-db6f6327-ec19-47ef-bba8-47c450490ec9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1709.313796] env[63379]: value = "task-1779857" [ 1709.313796] env[63379]: _type = "Task" [ 1709.313796] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1709.326414] env[63379]: DEBUG oslo_vmware.api [None req-db6f6327-ec19-47ef-bba8-47c450490ec9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779857, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.392511] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a1b31e8-052c-4915-86c4-6ffca9ea0130 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.402306] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31f4ebd3-a6cf-4624-bced-4a95d3e8b775 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.433114] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6a539f7-b317-44d1-a411-3c19283ffe83 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.441841] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a219cf8-e960-4b55-9850-ad453754a90a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.456852] env[63379]: DEBUG nova.compute.provider_tree [None req-f94df2a8-50be-4351-a89a-ea51614c1d18 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1709.545846] env[63379]: DEBUG oslo_vmware.api [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779856, 'name': PowerOnVM_Task} progress is 90%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.726551] env[63379]: DEBUG nova.compute.manager [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1709.825422] env[63379]: DEBUG oslo_vmware.api [None req-db6f6327-ec19-47ef-bba8-47c450490ec9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779857, 'name': PowerOffVM_Task, 'duration_secs': 0.193877} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1709.825750] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-db6f6327-ec19-47ef-bba8-47c450490ec9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1709.825874] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-db6f6327-ec19-47ef-bba8-47c450490ec9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1709.826191] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4703f421-e035-4b4f-85c8-23b8d7be1046 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.914468] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-db6f6327-ec19-47ef-bba8-47c450490ec9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1709.914713] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-db6f6327-ec19-47ef-bba8-47c450490ec9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1709.914904] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-db6f6327-ec19-47ef-bba8-47c450490ec9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Deleting the datastore file [datastore1] 758ade2c-7f75-4907-95d5-681d5792ae31 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1709.915227] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0e30c9d1-62c7-40e3-a494-8b464b863b60 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.924452] env[63379]: DEBUG oslo_vmware.api [None req-db6f6327-ec19-47ef-bba8-47c450490ec9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for the task: (returnval){ [ 1709.924452] env[63379]: value = "task-1779859" [ 1709.924452] env[63379]: _type = "Task" [ 1709.924452] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1709.934533] env[63379]: DEBUG oslo_vmware.api [None req-db6f6327-ec19-47ef-bba8-47c450490ec9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779859, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.962659] env[63379]: DEBUG nova.scheduler.client.report [None req-f94df2a8-50be-4351-a89a-ea51614c1d18 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1710.057600] env[63379]: DEBUG oslo_vmware.api [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779856, 'name': PowerOnVM_Task, 'duration_secs': 1.134959} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1710.058257] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1710.058690] env[63379]: INFO nova.compute.manager [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Took 8.06 seconds to spawn the instance on the hypervisor. [ 1710.060030] env[63379]: DEBUG nova.compute.manager [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1710.060428] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-648057fc-1b78-41b0-a862-899a09989cc5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.254832] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1710.434824] env[63379]: DEBUG oslo_vmware.api [None req-db6f6327-ec19-47ef-bba8-47c450490ec9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Task: {'id': task-1779859, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139451} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1710.435078] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-db6f6327-ec19-47ef-bba8-47c450490ec9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1710.435340] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-db6f6327-ec19-47ef-bba8-47c450490ec9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1710.435604] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-db6f6327-ec19-47ef-bba8-47c450490ec9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1710.436525] env[63379]: INFO nova.compute.manager [None req-db6f6327-ec19-47ef-bba8-47c450490ec9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1710.436525] env[63379]: DEBUG oslo.service.loopingcall [None req-db6f6327-ec19-47ef-bba8-47c450490ec9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1710.436525] env[63379]: DEBUG nova.compute.manager [-] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1710.436743] env[63379]: DEBUG nova.network.neutron [-] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1710.475337] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f94df2a8-50be-4351-a89a-ea51614c1d18 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.911s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1710.478843] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fda00fb3-011c-47ad-9289-3a601b14889e tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.443s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1710.478843] env[63379]: DEBUG nova.objects.instance [None req-fda00fb3-011c-47ad-9289-3a601b14889e tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Lazy-loading 'resources' on Instance uuid da66c3d9-ca03-4113-8703-64b666628936 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1710.515418] env[63379]: 
INFO nova.scheduler.client.report [None req-f94df2a8-50be-4351-a89a-ea51614c1d18 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Deleted allocations for instance 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655 [ 1710.586015] env[63379]: INFO nova.compute.manager [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Took 21.42 seconds to build instance. [ 1710.789747] env[63379]: DEBUG nova.compute.manager [req-55d6c146-fe94-4b8d-8a78-8c06000219bf req-ab4bf996-58a3-4149-ac17-dc97f096a92c service nova] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Received event network-vif-deleted-021a6cdc-585b-40dc-a330-d328102cf80c {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1710.789747] env[63379]: INFO nova.compute.manager [req-55d6c146-fe94-4b8d-8a78-8c06000219bf req-ab4bf996-58a3-4149-ac17-dc97f096a92c service nova] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Neutron deleted interface 021a6cdc-585b-40dc-a330-d328102cf80c; detaching it from the instance and deleting it from the info cache [ 1710.789884] env[63379]: DEBUG nova.network.neutron [req-55d6c146-fe94-4b8d-8a78-8c06000219bf req-ab4bf996-58a3-4149-ac17-dc97f096a92c service nova] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1711.026036] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f94df2a8-50be-4351-a89a-ea51614c1d18 tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Lock "7687aaa1-d1a0-4d0d-a6b4-47c454fe3655" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.967s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1711.087714] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e63570bc-203a-4632-a2c9-988aa8fb6ee4 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "fad7a2dd-291f-4105-95a6-56bdbcc7acb4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.939s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1711.231969] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffd876aa-495b-4c89-aed8-271724078e7c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.241133] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65e8935d-b020-49d3-b444-bf5fa3a832bd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.271793] env[63379]: DEBUG nova.network.neutron [-] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1711.273766] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcf9fe58-6db5-4a1f-97f5-5d5759129efa {{(pid=63379) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.282320] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-742aed52-e996-47dc-a6b9-e00b0a7a4e17 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.296694] env[63379]: DEBUG nova.compute.provider_tree [None req-fda00fb3-011c-47ad-9289-3a601b14889e tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1711.298645] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6d87322b-d0f6-47d4-978d-103e1d7b7670 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.307024] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6488da54-4edd-41a5-9806-b9d1e34951a6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.338499] env[63379]: DEBUG nova.compute.manager [req-55d6c146-fe94-4b8d-8a78-8c06000219bf req-ab4bf996-58a3-4149-ac17-dc97f096a92c service nova] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Detach interface failed, port_id=021a6cdc-585b-40dc-a330-d328102cf80c, reason: Instance 758ade2c-7f75-4907-95d5-681d5792ae31 could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 1711.778631] env[63379]: INFO nova.compute.manager [-] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Took 1.34 seconds to deallocate network for instance. 
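Nearly every record in this trace follows the same oslo.vmware pattern: a vSphere "*_Task" method is invoked through the API session (the "Invoking ... with opID=oslo.vmware-..." request_handler lines), the returned Task managed object is handed to wait_for_task, and _poll_task then logs "progress is N%" until the task reports success or failure. A minimal sketch of that pattern follows; the host, credentials, and VM reference are hypothetical placeholders, not values taken from this log.

    from oslo_vmware import api

    # Hypothetical connection settings; Nova reads the real ones from nova.conf.
    session = api.VMwareAPISession(
        'vcenter.example.com', 'administrator@vsphere.local', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Placeholder: a VirtualMachine managed-object reference obtained elsewhere
    # (e.g. via the SearchIndex/PropertyCollector lookups seen in this log).
    vm_ref = None

    # invoke_api sends the SOAP request and returns the Task managed object;
    # wait_for_task polls it (the "_poll_task ... progress is N%" records above)
    # and returns the task info on success, raising if the task fails.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    task_info = session.wait_for_task(task)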
[ 1711.801426] env[63379]: DEBUG nova.scheduler.client.report [None req-fda00fb3-011c-47ad-9289-3a601b14889e tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1712.258072] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Acquiring lock "6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1712.258072] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Lock "6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1712.291749] env[63379]: DEBUG oslo_concurrency.lockutils [None req-db6f6327-ec19-47ef-bba8-47c450490ec9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1712.292202] env[63379]: INFO nova.compute.manager [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Rebuilding instance [ 1712.308677] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fda00fb3-011c-47ad-9289-3a601b14889e tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.831s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1712.310637] env[63379]: DEBUG oslo_concurrency.lockutils [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.528s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1712.314360] env[63379]: INFO nova.compute.claims [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Claim successful on node 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1712.334492] env[63379]: INFO nova.scheduler.client.report [None req-fda00fb3-011c-47ad-9289-3a601b14889e tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Deleted allocations for instance da66c3d9-ca03-4113-8703-64b666628936 [ 1712.339994] env[63379]: DEBUG nova.compute.manager [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1712.340878] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e61368a-8d55-49b0-b701-8ccd9ecdbbe2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.760538] env[63379]: DEBUG nova.compute.manager [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1712.847603] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fda00fb3-011c-47ad-9289-3a601b14889e tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Lock "da66c3d9-ca03-4113-8703-64b666628936" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.220s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1712.851954] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1712.852751] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8ffb753c-d5d6-4744-8040-07934a65899a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.862063] env[63379]: DEBUG oslo_vmware.api [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1712.862063] env[63379]: value = "task-1779860" [ 1712.862063] env[63379]: _type = "Task" [ 1712.862063] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1712.874127] env[63379]: DEBUG oslo_vmware.api [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779860, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1712.998227] env[63379]: DEBUG oslo_concurrency.lockutils [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Acquiring lock "c900bb90-b4a8-40a2-9436-5a0ced1dd919" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1712.998227] env[63379]: DEBUG oslo_concurrency.lockutils [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Lock "c900bb90-b4a8-40a2-9436-5a0ced1dd919" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1713.216222] env[63379]: DEBUG oslo_concurrency.lockutils [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Acquiring lock "b9bc2562-9475-400e-9cf9-646b8f4c8cf2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1713.216614] env[63379]: DEBUG oslo_concurrency.lockutils [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Lock "b9bc2562-9475-400e-9cf9-646b8f4c8cf2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1713.279623] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1713.340416] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bbbcce13-e930-473b-b06b-25608ee4ac40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Acquiring lock "5c4ae6c6-538a-4724-ad77-340d9c60c24a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1713.340678] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bbbcce13-e930-473b-b06b-25608ee4ac40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Lock "5c4ae6c6-538a-4724-ad77-340d9c60c24a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1713.340879] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bbbcce13-e930-473b-b06b-25608ee4ac40 tempest-ServerRescueTestJSON-871022900 
tempest-ServerRescueTestJSON-871022900-project-member] Acquiring lock "5c4ae6c6-538a-4724-ad77-340d9c60c24a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1713.341074] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bbbcce13-e930-473b-b06b-25608ee4ac40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Lock "5c4ae6c6-538a-4724-ad77-340d9c60c24a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1713.341249] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bbbcce13-e930-473b-b06b-25608ee4ac40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Lock "5c4ae6c6-538a-4724-ad77-340d9c60c24a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1713.345622] env[63379]: INFO nova.compute.manager [None req-bbbcce13-e930-473b-b06b-25608ee4ac40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Terminating instance [ 1713.347457] env[63379]: DEBUG nova.compute.manager [None req-bbbcce13-e930-473b-b06b-25608ee4ac40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1713.347688] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-bbbcce13-e930-473b-b06b-25608ee4ac40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1713.348540] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7feeec35-08aa-4015-b35a-21c702376d53 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.358310] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbbcce13-e930-473b-b06b-25608ee4ac40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1713.358547] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-db40b7d0-2b00-4915-b592-eefab49b0535 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.368935] env[63379]: DEBUG oslo_vmware.api [None req-bbbcce13-e930-473b-b06b-25608ee4ac40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Waiting for the task: (returnval){ [ 1713.368935] env[63379]: value = "task-1779861" [ 1713.368935] env[63379]: _type = "Task" [ 1713.368935] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1713.375579] env[63379]: DEBUG oslo_vmware.api [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779860, 'name': PowerOffVM_Task, 'duration_secs': 0.195778} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1713.378538] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1713.378799] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1713.379839] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcb23b38-51e2-4d70-8f2d-212222176104 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.387173] env[63379]: DEBUG oslo_vmware.api [None req-bbbcce13-e930-473b-b06b-25608ee4ac40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779861, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1713.391316] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1713.393691] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-201a9c8f-ce4f-4caa-b4c0-97e9b062e658 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.500791] env[63379]: DEBUG nova.compute.manager [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Starting instance... 
{{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1713.543747] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Acquiring lock "8877e0f7-091b-4a91-bb5c-fb7733e5f70c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1713.544068] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Lock "8877e0f7-091b-4a91-bb5c-fb7733e5f70c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1713.580456] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-854fd4ba-d108-4c5b-9443-fa1fc2bb4457 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.588707] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-863403a8-6d14-4523-afd5-feb9d4ccb568 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.593461] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1713.593700] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1713.593919] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Deleting the datastore file [datastore1] fad7a2dd-291f-4105-95a6-56bdbcc7acb4 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1713.594217] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-878ed2dc-5007-4551-815d-a66ad195764b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.622067] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c206fbe-68dc-49ee-80fb-6e871b5b841c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.625626] env[63379]: DEBUG oslo_vmware.api [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1713.625626] env[63379]: value = "task-1779863" [ 
1713.625626] env[63379]: _type = "Task" [ 1713.625626] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1713.632059] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b329ef64-6882-4433-8e6d-656c7c8443fb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.638417] env[63379]: DEBUG oslo_vmware.api [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779863, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1713.647722] env[63379]: DEBUG nova.compute.provider_tree [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1713.719775] env[63379]: DEBUG nova.compute.manager [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1713.879172] env[63379]: DEBUG oslo_vmware.api [None req-bbbcce13-e930-473b-b06b-25608ee4ac40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779861, 'name': PowerOffVM_Task, 'duration_secs': 0.257513} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1713.879451] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbbcce13-e930-473b-b06b-25608ee4ac40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1713.880078] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-bbbcce13-e930-473b-b06b-25608ee4ac40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1713.880078] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4f2101ae-50ae-4fcb-b6ea-4f8f55a423e6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.021284] env[63379]: DEBUG oslo_concurrency.lockutils [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1714.046175] env[63379]: DEBUG nova.compute.manager [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1714.136776] env[63379]: DEBUG oslo_vmware.api [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779863, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.346519} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1714.137228] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1714.137816] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1714.137816] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1714.151415] env[63379]: DEBUG nova.scheduler.client.report [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1714.237941] env[63379]: DEBUG oslo_concurrency.lockutils [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1714.568552] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1714.657099] env[63379]: DEBUG oslo_concurrency.lockutils [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.346s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1714.657670] env[63379]: DEBUG nova.compute.manager [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1714.660510] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4e878ee6-0622-4688-8677-f1eeb6b12cbb tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.244s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1714.660749] env[63379]: DEBUG nova.objects.instance [None req-4e878ee6-0622-4688-8677-f1eeb6b12cbb tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lazy-loading 'resources' on Instance uuid 8b33e64a-ea19-4974-8c2d-350615b1e061 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1714.814934] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-bbbcce13-e930-473b-b06b-25608ee4ac40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1714.815194] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-bbbcce13-e930-473b-b06b-25608ee4ac40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1714.815393] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbbcce13-e930-473b-b06b-25608ee4ac40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Deleting the datastore file [datastore1] 5c4ae6c6-538a-4724-ad77-340d9c60c24a {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1714.815868] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7c55a8d8-a5a2-4d8c-a34a-89aac259334d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.823747] env[63379]: DEBUG oslo_vmware.api [None req-bbbcce13-e930-473b-b06b-25608ee4ac40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Waiting for the task: (returnval){ [ 1714.823747] env[63379]: value = "task-1779865" [ 1714.823747] env[63379]: _type = "Task" [ 1714.823747] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1714.831203] env[63379]: DEBUG oslo_vmware.api [None req-bbbcce13-e930-473b-b06b-25608ee4ac40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779865, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1715.163558] env[63379]: DEBUG nova.compute.utils [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1715.165145] env[63379]: DEBUG nova.compute.manager [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1715.165377] env[63379]: DEBUG nova.network.neutron [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1715.177863] env[63379]: DEBUG nova.virt.hardware [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1715.178127] env[63379]: DEBUG nova.virt.hardware [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1715.178327] env[63379]: DEBUG nova.virt.hardware [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1715.178534] env[63379]: DEBUG nova.virt.hardware [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1715.178689] env[63379]: DEBUG nova.virt.hardware [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1715.178837] env[63379]: DEBUG nova.virt.hardware [None 
req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1715.179065] env[63379]: DEBUG nova.virt.hardware [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1715.179243] env[63379]: DEBUG nova.virt.hardware [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1715.179410] env[63379]: DEBUG nova.virt.hardware [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1715.179612] env[63379]: DEBUG nova.virt.hardware [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1715.179809] env[63379]: DEBUG nova.virt.hardware [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1715.181091] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae32438f-1c67-404e-a47e-392a8b8309ce {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.193457] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3b14d71-fd1b-4ce5-8b97-3c1b03875679 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.208016] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d4:3c:5d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8f441782-e89c-4815-b53e-af83c5d27902', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9d36760a-dfa7-4ce6-b2db-a72018c2a272', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1715.215656] env[63379]: DEBUG oslo.service.loopingcall [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1715.218756] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1715.219174] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-038ebe58-4aaa-4258-9a58-c783b2a3f134 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.241169] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1715.241169] env[63379]: value = "task-1779866" [ 1715.241169] env[63379]: _type = "Task" [ 1715.241169] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1715.251812] env[63379]: DEBUG nova.policy [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4daab3ae5955497a9d25b4ef59118d0e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ba1a1cf17f9941b299a6102689835f88', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1715.258508] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779866, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.034338] env[63379]: DEBUG nova.compute.manager [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1716.037312] env[63379]: DEBUG oslo_vmware.api [None req-bbbcce13-e930-473b-b06b-25608ee4ac40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Task: {'id': task-1779865, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.178598} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1716.037972] env[63379]: DEBUG nova.network.neutron [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Successfully created port: 9d972f73-e98a-4c4d-8551-f7db527be2a7 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1716.044878] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbbcce13-e930-473b-b06b-25608ee4ac40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1716.045082] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-bbbcce13-e930-473b-b06b-25608ee4ac40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1716.045267] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-bbbcce13-e930-473b-b06b-25608ee4ac40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1716.045439] env[63379]: INFO nova.compute.manager [None req-bbbcce13-e930-473b-b06b-25608ee4ac40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Took 2.70 seconds to destroy the instance on the hypervisor. [ 1716.045671] env[63379]: DEBUG oslo.service.loopingcall [None req-bbbcce13-e930-473b-b06b-25608ee4ac40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1716.046255] env[63379]: DEBUG nova.compute.manager [-] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1716.046366] env[63379]: DEBUG nova.network.neutron [-] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1716.054984] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779866, 'name': CreateVM_Task, 'duration_secs': 0.406233} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1716.054984] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1716.055383] env[63379]: DEBUG oslo_concurrency.lockutils [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1716.055558] env[63379]: DEBUG oslo_concurrency.lockutils [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1716.055886] env[63379]: DEBUG oslo_concurrency.lockutils [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1716.056148] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56822b4c-1fc2-4935-ba14-13f0dd7f6186 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.062654] env[63379]: DEBUG oslo_vmware.api [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1716.062654] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]521c10f4-792f-8b2e-2e07-942ee8e26ba9" [ 1716.062654] env[63379]: _type = "Task" [ 1716.062654] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1716.070307] env[63379]: DEBUG oslo_vmware.api [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]521c10f4-792f-8b2e-2e07-942ee8e26ba9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.168511] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-497724ea-99b3-4bce-a9c1-40d4ed23d3fc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.175959] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f02f98b-4541-4a19-9642-27c18c9c2bca {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.207168] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e59f0f3c-c1d4-415f-b68f-81d193451a25 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.215951] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9a1d1a4-38fe-4cde-b66f-8cea55b78c45 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.230337] env[63379]: DEBUG nova.compute.provider_tree [None req-4e878ee6-0622-4688-8677-f1eeb6b12cbb tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1716.320248] env[63379]: DEBUG nova.compute.manager [req-25bcca7d-fb3a-4a1b-9914-e5128cb482a5 req-8b7d8cc1-91e1-4acc-bd8a-9da7e32bd6ec service nova] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Received event network-vif-deleted-9e2aaa43-4ac9-490a-a951-3521757945cd {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1716.320248] env[63379]: INFO nova.compute.manager [req-25bcca7d-fb3a-4a1b-9914-e5128cb482a5 req-8b7d8cc1-91e1-4acc-bd8a-9da7e32bd6ec service nova] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Neutron deleted interface 9e2aaa43-4ac9-490a-a951-3521757945cd; detaching it from the instance and deleting it from the info cache [ 1716.320248] env[63379]: DEBUG nova.network.neutron [req-25bcca7d-fb3a-4a1b-9914-e5128cb482a5 req-8b7d8cc1-91e1-4acc-bd8a-9da7e32bd6ec service nova] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1716.574135] env[63379]: DEBUG oslo_vmware.api [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]521c10f4-792f-8b2e-2e07-942ee8e26ba9, 'name': SearchDatastore_Task, 'duration_secs': 0.008928} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1716.574510] env[63379]: DEBUG oslo_concurrency.lockutils [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1716.574812] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1716.575130] env[63379]: DEBUG oslo_concurrency.lockutils [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1716.575339] env[63379]: DEBUG oslo_concurrency.lockutils [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1716.575566] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1716.575857] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c185aaf8-afd6-4164-bc81-153bf598828b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.584708] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1716.584890] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1716.585847] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a018d8c2-5355-43ea-9623-b0367111c3d5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.591622] env[63379]: DEBUG oslo_vmware.api [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1716.591622] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]525c5c5c-ea7b-596c-df5d-c3d673c79293" [ 1716.591622] env[63379]: _type = "Task" [ 1716.591622] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1716.599357] env[63379]: DEBUG oslo_vmware.api [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]525c5c5c-ea7b-596c-df5d-c3d673c79293, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.736608] env[63379]: DEBUG nova.scheduler.client.report [None req-4e878ee6-0622-4688-8677-f1eeb6b12cbb tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1716.795579] env[63379]: DEBUG nova.network.neutron [-] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1716.825027] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4f192829-073d-4487-b57a-26815e736ebe {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.837525] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ef47efb-2911-408a-84dc-4d4e9cb0f357 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.873747] env[63379]: DEBUG nova.compute.manager [req-25bcca7d-fb3a-4a1b-9914-e5128cb482a5 req-8b7d8cc1-91e1-4acc-bd8a-9da7e32bd6ec service nova] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Detach interface failed, port_id=9e2aaa43-4ac9-490a-a951-3521757945cd, reason: Instance 5c4ae6c6-538a-4724-ad77-340d9c60c24a could not be found. 
{{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 1717.052167] env[63379]: DEBUG nova.compute.manager [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1717.078879] env[63379]: DEBUG nova.virt.hardware [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1717.079215] env[63379]: DEBUG nova.virt.hardware [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1717.079393] env[63379]: DEBUG nova.virt.hardware [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1717.079586] env[63379]: DEBUG nova.virt.hardware [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1717.079745] env[63379]: DEBUG nova.virt.hardware [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1717.079981] env[63379]: DEBUG nova.virt.hardware [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1717.080146] env[63379]: DEBUG nova.virt.hardware [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1717.080325] env[63379]: DEBUG nova.virt.hardware [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1717.080496] env[63379]: DEBUG nova.virt.hardware [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1717.080671] env[63379]: DEBUG nova.virt.hardware [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1717.080904] env[63379]: DEBUG nova.virt.hardware [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1717.082120] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d84b3f4a-fd7f-48cb-98fc-0531d20f0df0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.090579] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c7a5841-c2e1-48c1-b418-a679b6be1229 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.110407] env[63379]: DEBUG oslo_vmware.api [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]525c5c5c-ea7b-596c-df5d-c3d673c79293, 'name': SearchDatastore_Task, 'duration_secs': 0.008274} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1717.111167] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51a36834-04cf-40f1-bfcf-82c6576eae3d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.116866] env[63379]: DEBUG oslo_vmware.api [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1717.116866] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52cf688e-cdaf-d016-a287-eab44aec9e58" [ 1717.116866] env[63379]: _type = "Task" [ 1717.116866] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1717.126375] env[63379]: DEBUG oslo_vmware.api [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52cf688e-cdaf-d016-a287-eab44aec9e58, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.241992] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4e878ee6-0622-4688-8677-f1eeb6b12cbb tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.581s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1717.244347] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1bc93f90-c5f2-4977-b271-06ac84232882 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.901s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1717.244587] env[63379]: DEBUG nova.objects.instance [None req-1bc93f90-c5f2-4977-b271-06ac84232882 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Lazy-loading 'resources' on Instance uuid 861cda26-f938-4b2e-ba3d-56b8469b6034 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1717.266703] env[63379]: INFO nova.scheduler.client.report [None req-4e878ee6-0622-4688-8677-f1eeb6b12cbb tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Deleted allocations for instance 8b33e64a-ea19-4974-8c2d-350615b1e061 [ 1717.298601] env[63379]: INFO nova.compute.manager [-] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Took 1.25 seconds to deallocate network for instance. 
[ 1717.515163] env[63379]: DEBUG nova.compute.manager [req-065d6908-fc36-4eb2-9eb8-d76ddea14927 req-e2c5a15c-4007-4bea-86b9-71908363f6b3 service nova] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Received event network-vif-plugged-9d972f73-e98a-4c4d-8551-f7db527be2a7 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1717.515163] env[63379]: DEBUG oslo_concurrency.lockutils [req-065d6908-fc36-4eb2-9eb8-d76ddea14927 req-e2c5a15c-4007-4bea-86b9-71908363f6b3 service nova] Acquiring lock "19941838-d6b0-4fb8-9d06-f4a1b80ba428-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1717.515163] env[63379]: DEBUG oslo_concurrency.lockutils [req-065d6908-fc36-4eb2-9eb8-d76ddea14927 req-e2c5a15c-4007-4bea-86b9-71908363f6b3 service nova] Lock "19941838-d6b0-4fb8-9d06-f4a1b80ba428-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1717.515163] env[63379]: DEBUG oslo_concurrency.lockutils [req-065d6908-fc36-4eb2-9eb8-d76ddea14927 req-e2c5a15c-4007-4bea-86b9-71908363f6b3 service nova] Lock "19941838-d6b0-4fb8-9d06-f4a1b80ba428-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1717.515163] env[63379]: DEBUG nova.compute.manager [req-065d6908-fc36-4eb2-9eb8-d76ddea14927 req-e2c5a15c-4007-4bea-86b9-71908363f6b3 service nova] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] No waiting events found dispatching network-vif-plugged-9d972f73-e98a-4c4d-8551-f7db527be2a7 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1717.515163] env[63379]: WARNING nova.compute.manager [req-065d6908-fc36-4eb2-9eb8-d76ddea14927 req-e2c5a15c-4007-4bea-86b9-71908363f6b3 service nova] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Received unexpected event network-vif-plugged-9d972f73-e98a-4c4d-8551-f7db527be2a7 for instance with vm_state building and task_state spawning. [ 1717.613251] env[63379]: DEBUG nova.network.neutron [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Successfully updated port: 9d972f73-e98a-4c4d-8551-f7db527be2a7 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1717.628637] env[63379]: DEBUG oslo_vmware.api [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52cf688e-cdaf-d016-a287-eab44aec9e58, 'name': SearchDatastore_Task, 'duration_secs': 0.013689} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1717.629663] env[63379]: DEBUG oslo_concurrency.lockutils [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1717.629773] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] fad7a2dd-291f-4105-95a6-56bdbcc7acb4/fad7a2dd-291f-4105-95a6-56bdbcc7acb4.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1717.630047] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-596c9d46-fb57-438b-8e3a-770115148370 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.637346] env[63379]: DEBUG oslo_vmware.api [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1717.637346] env[63379]: value = "task-1779867" [ 1717.637346] env[63379]: _type = "Task" [ 1717.637346] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1717.645392] env[63379]: DEBUG oslo_vmware.api [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779867, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.774992] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4e878ee6-0622-4688-8677-f1eeb6b12cbb tempest-ImagesTestJSON-516714316 tempest-ImagesTestJSON-516714316-project-member] Lock "8b33e64a-ea19-4974-8c2d-350615b1e061" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.279s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1717.805841] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bbbcce13-e930-473b-b06b-25608ee4ac40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1718.035438] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb7a3637-971a-43ba-9952-a6e530e8939f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.045415] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2740c40-c4f1-4a65-9fb6-8f3e99c3f5cd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.079396] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66408d99-1afb-4257-9e77-b76fe4ce31a7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.087223] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9dfdc72-8d95-416e-8b8a-ab2e4f5f4b3a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.100503] env[63379]: DEBUG nova.compute.provider_tree [None req-1bc93f90-c5f2-4977-b271-06ac84232882 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1718.116115] env[63379]: DEBUG oslo_concurrency.lockutils [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquiring lock "refresh_cache-19941838-d6b0-4fb8-9d06-f4a1b80ba428" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1718.116260] env[63379]: DEBUG oslo_concurrency.lockutils [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquired lock "refresh_cache-19941838-d6b0-4fb8-9d06-f4a1b80ba428" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1718.116473] env[63379]: DEBUG nova.network.neutron [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Building network info cache for instance {{(pid=63379) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 1718.147063] env[63379]: DEBUG oslo_vmware.api [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779867, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.463848} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1718.147436] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] fad7a2dd-291f-4105-95a6-56bdbcc7acb4/fad7a2dd-291f-4105-95a6-56bdbcc7acb4.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1718.147682] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1718.147938] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e33e1cfd-4d3e-40a4-9523-eb6d56f761f9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.154583] env[63379]: DEBUG oslo_vmware.api [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1718.154583] env[63379]: value = "task-1779868" [ 1718.154583] env[63379]: _type = "Task" [ 1718.154583] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1718.162608] env[63379]: DEBUG oslo_vmware.api [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779868, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1718.604052] env[63379]: DEBUG nova.scheduler.client.report [None req-1bc93f90-c5f2-4977-b271-06ac84232882 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1718.664207] env[63379]: DEBUG oslo_vmware.api [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779868, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061157} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1718.664537] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1718.665340] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f9e94a7-0e9f-4bab-ae5f-f9365d893fe5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.668298] env[63379]: DEBUG nova.network.neutron [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1718.689521] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] fad7a2dd-291f-4105-95a6-56bdbcc7acb4/fad7a2dd-291f-4105-95a6-56bdbcc7acb4.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1718.690218] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a723cc89-0ceb-471a-833c-1dbcccc16913 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.718107] env[63379]: DEBUG oslo_vmware.api [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1718.718107] env[63379]: value = "task-1779869" [ 1718.718107] env[63379]: _type = "Task" [ 1718.718107] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1718.727337] env[63379]: DEBUG oslo_vmware.api [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779869, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1718.856711] env[63379]: DEBUG nova.network.neutron [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Updating instance_info_cache with network_info: [{"id": "9d972f73-e98a-4c4d-8551-f7db527be2a7", "address": "fa:16:3e:c4:ee:85", "network": {"id": "2c6cbb4b-63db-4c84-91d3-63d6f68cfb71", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-740697972-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba1a1cf17f9941b299a6102689835f88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1195acd-707f-4bac-a99d-14db17a63802", "external-id": "nsx-vlan-transportzone-322", "segmentation_id": 322, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d972f73-e9", "ovs_interfaceid": "9d972f73-e98a-4c4d-8551-f7db527be2a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1719.109641] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1bc93f90-c5f2-4977-b271-06ac84232882 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.865s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1719.112987] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.857s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1719.114021] env[63379]: INFO nova.compute.claims [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1719.140888] env[63379]: INFO nova.scheduler.client.report [None req-1bc93f90-c5f2-4977-b271-06ac84232882 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Deleted allocations for instance 861cda26-f938-4b2e-ba3d-56b8469b6034 [ 1719.228438] 
env[63379]: DEBUG oslo_vmware.api [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779869, 'name': ReconfigVM_Task, 'duration_secs': 0.277971} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1719.228736] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Reconfigured VM instance instance-0000004e to attach disk [datastore1] fad7a2dd-291f-4105-95a6-56bdbcc7acb4/fad7a2dd-291f-4105-95a6-56bdbcc7acb4.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1719.229408] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bc570cec-4942-4261-a5bc-3b566f04589c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.235657] env[63379]: DEBUG oslo_vmware.api [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1719.235657] env[63379]: value = "task-1779870" [ 1719.235657] env[63379]: _type = "Task" [ 1719.235657] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1719.244550] env[63379]: DEBUG oslo_vmware.api [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779870, 'name': Rename_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.359399] env[63379]: DEBUG oslo_concurrency.lockutils [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Releasing lock "refresh_cache-19941838-d6b0-4fb8-9d06-f4a1b80ba428" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1719.359740] env[63379]: DEBUG nova.compute.manager [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Instance network_info: |[{"id": "9d972f73-e98a-4c4d-8551-f7db527be2a7", "address": "fa:16:3e:c4:ee:85", "network": {"id": "2c6cbb4b-63db-4c84-91d3-63d6f68cfb71", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-740697972-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba1a1cf17f9941b299a6102689835f88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1195acd-707f-4bac-a99d-14db17a63802", "external-id": "nsx-vlan-transportzone-322", "segmentation_id": 322, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d972f73-e9", "ovs_interfaceid": "9d972f73-e98a-4c4d-8551-f7db527be2a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1719.360198] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c4:ee:85', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c1195acd-707f-4bac-a99d-14db17a63802', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9d972f73-e98a-4c4d-8551-f7db527be2a7', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1719.368515] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Creating folder: Project (ba1a1cf17f9941b299a6102689835f88). Parent ref: group-v369214. 
{{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1719.368696] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cb93ce33-79ce-414d-b48c-154ff69c9649 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.379068] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Created folder: Project (ba1a1cf17f9941b299a6102689835f88) in parent group-v369214. [ 1719.379312] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Creating folder: Instances. Parent ref: group-v369436. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1719.379602] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-14543f70-a207-4769-a537-f8c4e6dc90f9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.388909] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Created folder: Instances in parent group-v369436. [ 1719.389182] env[63379]: DEBUG oslo.service.loopingcall [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1719.389426] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1719.389624] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-20e3be22-e7b1-44a8-8fe4-f6330dd5cae8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.408659] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1719.408659] env[63379]: value = "task-1779873" [ 1719.408659] env[63379]: _type = "Task" [ 1719.408659] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1719.416289] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779873, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.541409] env[63379]: DEBUG nova.compute.manager [req-7fba78b7-cda6-4917-b6a6-adf16ce73e40 req-bb254f05-2091-4f0e-8fd1-69f59dce2bac service nova] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Received event network-changed-9d972f73-e98a-4c4d-8551-f7db527be2a7 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1719.541623] env[63379]: DEBUG nova.compute.manager [req-7fba78b7-cda6-4917-b6a6-adf16ce73e40 req-bb254f05-2091-4f0e-8fd1-69f59dce2bac service nova] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Refreshing instance network info cache due to event network-changed-9d972f73-e98a-4c4d-8551-f7db527be2a7. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1719.541836] env[63379]: DEBUG oslo_concurrency.lockutils [req-7fba78b7-cda6-4917-b6a6-adf16ce73e40 req-bb254f05-2091-4f0e-8fd1-69f59dce2bac service nova] Acquiring lock "refresh_cache-19941838-d6b0-4fb8-9d06-f4a1b80ba428" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1719.541982] env[63379]: DEBUG oslo_concurrency.lockutils [req-7fba78b7-cda6-4917-b6a6-adf16ce73e40 req-bb254f05-2091-4f0e-8fd1-69f59dce2bac service nova] Acquired lock "refresh_cache-19941838-d6b0-4fb8-9d06-f4a1b80ba428" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1719.542164] env[63379]: DEBUG nova.network.neutron [req-7fba78b7-cda6-4917-b6a6-adf16ce73e40 req-bb254f05-2091-4f0e-8fd1-69f59dce2bac service nova] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Refreshing network info cache for port 9d972f73-e98a-4c4d-8551-f7db527be2a7 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1719.649050] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1bc93f90-c5f2-4977-b271-06ac84232882 tempest-InstanceActionsNegativeTestJSON-37425146 tempest-InstanceActionsNegativeTestJSON-37425146-project-member] Lock "861cda26-f938-4b2e-ba3d-56b8469b6034" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.249s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1719.747144] env[63379]: DEBUG oslo_vmware.api [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779870, 'name': Rename_Task, 'duration_secs': 0.149382} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1719.747496] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1719.747711] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9b751b3c-ef32-4e84-90d7-30d36d24371e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.756165] env[63379]: DEBUG oslo_vmware.api [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1719.756165] env[63379]: value = "task-1779874" [ 1719.756165] env[63379]: _type = "Task" [ 1719.756165] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1719.763832] env[63379]: DEBUG oslo_vmware.api [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779874, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.919311] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779873, 'name': CreateVM_Task, 'duration_secs': 0.4055} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1719.919521] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1719.920283] env[63379]: DEBUG oslo_concurrency.lockutils [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1719.920467] env[63379]: DEBUG oslo_concurrency.lockutils [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1719.920791] env[63379]: DEBUG oslo_concurrency.lockutils [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1719.921134] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25829859-3c12-4dd7-8bda-2ba531ab56c0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.925291] env[63379]: DEBUG oslo_vmware.api [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 1719.925291] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f8fb50-c8f5-a44c-fdcd-598c561ea04f" [ 1719.925291] env[63379]: _type = "Task" [ 1719.925291] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1719.933259] env[63379]: DEBUG oslo_vmware.api [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f8fb50-c8f5-a44c-fdcd-598c561ea04f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.266692] env[63379]: DEBUG oslo_vmware.api [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779874, 'name': PowerOnVM_Task, 'duration_secs': 0.467049} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1720.267052] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1720.267684] env[63379]: DEBUG nova.compute.manager [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1720.270944] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fec0d3d-297f-4721-a1c8-d94467ceb5b5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.349993] env[63379]: DEBUG nova.network.neutron [req-7fba78b7-cda6-4917-b6a6-adf16ce73e40 req-bb254f05-2091-4f0e-8fd1-69f59dce2bac service nova] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Updated VIF entry in instance network info cache for port 9d972f73-e98a-4c4d-8551-f7db527be2a7. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1720.350404] env[63379]: DEBUG nova.network.neutron [req-7fba78b7-cda6-4917-b6a6-adf16ce73e40 req-bb254f05-2091-4f0e-8fd1-69f59dce2bac service nova] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Updating instance_info_cache with network_info: [{"id": "9d972f73-e98a-4c4d-8551-f7db527be2a7", "address": "fa:16:3e:c4:ee:85", "network": {"id": "2c6cbb4b-63db-4c84-91d3-63d6f68cfb71", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-740697972-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba1a1cf17f9941b299a6102689835f88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1195acd-707f-4bac-a99d-14db17a63802", "external-id": "nsx-vlan-transportzone-322", "segmentation_id": 322, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d972f73-e9", "ovs_interfaceid": "9d972f73-e98a-4c4d-8551-f7db527be2a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1720.387909] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b12453d-3933-4f52-afd8-1e7d6869f642 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.397409] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-188e4708-3522-4a59-b53b-55b3f9905695 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.437689] 
env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d18d7bd3-787f-4044-a045-7c839fa848fc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.446634] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-085a1851-12b3-4559-be8b-c650afe3086c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.450603] env[63379]: DEBUG oslo_vmware.api [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f8fb50-c8f5-a44c-fdcd-598c561ea04f, 'name': SearchDatastore_Task, 'duration_secs': 0.015108} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1720.453847] env[63379]: DEBUG oslo_concurrency.lockutils [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1720.453847] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1720.453847] env[63379]: DEBUG oslo_concurrency.lockutils [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1720.453847] env[63379]: DEBUG oslo_concurrency.lockutils [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1720.453847] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1720.453847] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6fc9a1a5-5db2-459e-b78c-234132127549 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.461867] env[63379]: DEBUG nova.compute.provider_tree [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Inventory has not changed in 
ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1720.470292] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1720.470392] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1720.471738] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf4c4687-208a-4522-afe4-8fe3e3424f8e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.477278] env[63379]: DEBUG oslo_vmware.api [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 1720.477278] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5276b632-f15b-d8b2-ce93-24fb3b527049" [ 1720.477278] env[63379]: _type = "Task" [ 1720.477278] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.484882] env[63379]: DEBUG oslo_vmware.api [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5276b632-f15b-d8b2-ce93-24fb3b527049, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.792753] env[63379]: DEBUG oslo_concurrency.lockutils [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1720.858458] env[63379]: DEBUG oslo_concurrency.lockutils [req-7fba78b7-cda6-4917-b6a6-adf16ce73e40 req-bb254f05-2091-4f0e-8fd1-69f59dce2bac service nova] Releasing lock "refresh_cache-19941838-d6b0-4fb8-9d06-f4a1b80ba428" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1720.965035] env[63379]: DEBUG nova.scheduler.client.report [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1720.989226] env[63379]: DEBUG oslo_vmware.api [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5276b632-f15b-d8b2-ce93-24fb3b527049, 'name': SearchDatastore_Task, 'duration_secs': 0.008658} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1720.990723] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0716619-bcd5-41d6-a3e2-7a3e896bf21c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.997066] env[63379]: DEBUG oslo_vmware.api [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 1720.997066] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d813d1-bb62-941d-a9f2-609eb05fa3b2" [ 1720.997066] env[63379]: _type = "Task" [ 1720.997066] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1721.009144] env[63379]: DEBUG oslo_vmware.api [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d813d1-bb62-941d-a9f2-609eb05fa3b2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.474640] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.363s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1721.475237] env[63379]: DEBUG nova.compute.manager [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1721.484395] env[63379]: DEBUG oslo_concurrency.lockutils [None req-db6f6327-ec19-47ef-bba8-47c450490ec9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.190s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1721.484395] env[63379]: DEBUG nova.objects.instance [None req-db6f6327-ec19-47ef-bba8-47c450490ec9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Lazy-loading 'resources' on Instance uuid 758ade2c-7f75-4907-95d5-681d5792ae31 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1721.512734] env[63379]: DEBUG oslo_vmware.api [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d813d1-bb62-941d-a9f2-609eb05fa3b2, 'name': SearchDatastore_Task, 'duration_secs': 0.009975} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1721.513027] env[63379]: DEBUG oslo_concurrency.lockutils [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1721.513297] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 19941838-d6b0-4fb8-9d06-f4a1b80ba428/19941838-d6b0-4fb8-9d06-f4a1b80ba428.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1721.513564] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cb419d45-ae16-49e8-8b52-56942ba2a1e8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.521930] env[63379]: DEBUG oslo_vmware.api [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 1721.521930] env[63379]: value = "task-1779875" [ 1721.521930] env[63379]: _type = "Task" [ 1721.521930] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1721.534824] env[63379]: DEBUG oslo_vmware.api [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1779875, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.793484] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cd79a38d-e171-4897-b885-d0d33043bf5d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "fad7a2dd-291f-4105-95a6-56bdbcc7acb4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1721.793484] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cd79a38d-e171-4897-b885-d0d33043bf5d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "fad7a2dd-291f-4105-95a6-56bdbcc7acb4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1721.793484] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cd79a38d-e171-4897-b885-d0d33043bf5d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "fad7a2dd-291f-4105-95a6-56bdbcc7acb4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1721.793484] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cd79a38d-e171-4897-b885-d0d33043bf5d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "fad7a2dd-291f-4105-95a6-56bdbcc7acb4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1721.793484] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cd79a38d-e171-4897-b885-d0d33043bf5d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "fad7a2dd-291f-4105-95a6-56bdbcc7acb4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1721.795042] env[63379]: INFO nova.compute.manager [None req-cd79a38d-e171-4897-b885-d0d33043bf5d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Terminating instance [ 1721.800079] env[63379]: DEBUG nova.compute.manager [None req-cd79a38d-e171-4897-b885-d0d33043bf5d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1721.803023] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cd79a38d-e171-4897-b885-d0d33043bf5d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1721.803023] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89504949-f831-42eb-9193-c113c985c1e9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.814111] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd79a38d-e171-4897-b885-d0d33043bf5d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1721.814111] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-423a6d9f-8adf-4d43-a53f-b3afa686e1a1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.825395] env[63379]: DEBUG oslo_vmware.api [None req-cd79a38d-e171-4897-b885-d0d33043bf5d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1721.825395] env[63379]: value = "task-1779876" [ 1721.825395] env[63379]: _type = "Task" [ 1721.825395] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1721.838022] env[63379]: DEBUG oslo_vmware.api [None req-cd79a38d-e171-4897-b885-d0d33043bf5d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779876, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.985939] env[63379]: DEBUG nova.compute.utils [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1721.988008] env[63379]: DEBUG nova.compute.manager [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1721.988008] env[63379]: DEBUG nova.network.neutron [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1722.037801] env[63379]: DEBUG oslo_vmware.api [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1779875, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.497275} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1722.037971] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 19941838-d6b0-4fb8-9d06-f4a1b80ba428/19941838-d6b0-4fb8-9d06-f4a1b80ba428.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1722.039321] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1722.039321] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-498ab0f0-a563-4ca8-a718-28dc3e22af91 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.046766] env[63379]: DEBUG oslo_vmware.api [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 1722.046766] env[63379]: value = "task-1779877" [ 1722.046766] env[63379]: _type = "Task" [ 1722.046766] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.063318] env[63379]: DEBUG oslo_vmware.api [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1779877, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.065607] env[63379]: DEBUG nova.policy [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '991a93509b8943a693859488a56352b3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '928a9d102f0e45b897eae72fa566c0fe', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1722.293986] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe1cf3b1-efc4-4d00-b2e6-3e431edef9e0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.302529] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-049e7cec-df07-4f30-9447-f8ba533a530e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.343392] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10c618ce-242c-4ba2-9947-859aabfaae57 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.353633] env[63379]: DEBUG oslo_vmware.api [None req-cd79a38d-e171-4897-b885-d0d33043bf5d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779876, 'name': PowerOffVM_Task, 'duration_secs': 0.335503} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1722.357013] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd79a38d-e171-4897-b885-d0d33043bf5d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1722.357013] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cd79a38d-e171-4897-b885-d0d33043bf5d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1722.360745] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-907a53bd-2001-4cdf-b63c-43f0773aa477 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.361652] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6148548f-f64d-482d-af9d-e8c0d234291f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.379816] env[63379]: DEBUG nova.compute.provider_tree [None req-db6f6327-ec19-47ef-bba8-47c450490ec9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1722.392805] env[63379]: DEBUG nova.network.neutron [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Successfully created port: 1c98bd39-d74e-43ba-9a95-dcbdb4d073ab {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1722.439468] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "eda684fa-1595-4985-beb7-c298049411bf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1722.439541] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "eda684fa-1595-4985-beb7-c298049411bf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1722.456987] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cd79a38d-e171-4897-b885-d0d33043bf5d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1722.457236] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None 
req-cd79a38d-e171-4897-b885-d0d33043bf5d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1722.457456] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd79a38d-e171-4897-b885-d0d33043bf5d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Deleting the datastore file [datastore1] fad7a2dd-291f-4105-95a6-56bdbcc7acb4 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1722.457690] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d7e35dd3-6f5a-46f7-8acd-e913111360f9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.465416] env[63379]: DEBUG oslo_vmware.api [None req-cd79a38d-e171-4897-b885-d0d33043bf5d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1722.465416] env[63379]: value = "task-1779879" [ 1722.465416] env[63379]: _type = "Task" [ 1722.465416] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.473310] env[63379]: DEBUG oslo_vmware.api [None req-cd79a38d-e171-4897-b885-d0d33043bf5d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779879, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.494168] env[63379]: DEBUG nova.compute.manager [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1722.557078] env[63379]: DEBUG oslo_vmware.api [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1779877, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060196} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1722.557349] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1722.558570] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1470ba6b-8e55-4ecf-83ef-bd4462532a03 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.581530] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Reconfiguring VM instance instance-0000004f to attach disk [datastore1] 19941838-d6b0-4fb8-9d06-f4a1b80ba428/19941838-d6b0-4fb8-9d06-f4a1b80ba428.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1722.581836] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6d389c1f-60e7-4b8f-b3bf-148bd3d3e08e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.601858] env[63379]: DEBUG oslo_vmware.api [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 1722.601858] env[63379]: value = "task-1779880" [ 1722.601858] env[63379]: _type = "Task" [ 1722.601858] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.610467] env[63379]: DEBUG oslo_vmware.api [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1779880, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.883649] env[63379]: DEBUG nova.scheduler.client.report [None req-db6f6327-ec19-47ef-bba8-47c450490ec9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1722.941525] env[63379]: DEBUG nova.compute.manager [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Starting instance... 
{{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1722.977760] env[63379]: DEBUG oslo_vmware.api [None req-cd79a38d-e171-4897-b885-d0d33043bf5d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779879, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.250549} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1722.977848] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd79a38d-e171-4897-b885-d0d33043bf5d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1722.978027] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cd79a38d-e171-4897-b885-d0d33043bf5d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1722.978292] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cd79a38d-e171-4897-b885-d0d33043bf5d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1722.978383] env[63379]: INFO nova.compute.manager [None req-cd79a38d-e171-4897-b885-d0d33043bf5d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1722.978739] env[63379]: DEBUG oslo.service.loopingcall [None req-cd79a38d-e171-4897-b885-d0d33043bf5d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1722.978816] env[63379]: DEBUG nova.compute.manager [-] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1722.982779] env[63379]: DEBUG nova.network.neutron [-] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1723.117023] env[63379]: DEBUG oslo_vmware.api [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1779880, 'name': ReconfigVM_Task, 'duration_secs': 0.286502} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1723.119055] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Reconfigured VM instance instance-0000004f to attach disk [datastore1] 19941838-d6b0-4fb8-9d06-f4a1b80ba428/19941838-d6b0-4fb8-9d06-f4a1b80ba428.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1723.119708] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2b479a81-a857-402b-ba46-c47a6d8b35ff {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.126538] env[63379]: DEBUG oslo_vmware.api [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 1723.126538] env[63379]: value = "task-1779881" [ 1723.126538] env[63379]: _type = "Task" [ 1723.126538] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1723.134764] env[63379]: DEBUG oslo_vmware.api [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1779881, 'name': Rename_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.354906] env[63379]: DEBUG nova.compute.manager [req-2ca045a7-7ceb-4921-a618-678a2bfa2cde req-c8c87a35-4b76-4f25-b950-6be367e06ead service nova] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Received event network-vif-deleted-9d36760a-dfa7-4ce6-b2db-a72018c2a272 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1723.355154] env[63379]: INFO nova.compute.manager [req-2ca045a7-7ceb-4921-a618-678a2bfa2cde req-c8c87a35-4b76-4f25-b950-6be367e06ead service nova] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Neutron deleted interface 9d36760a-dfa7-4ce6-b2db-a72018c2a272; detaching it from the instance and deleting it from the info cache [ 1723.355466] env[63379]: DEBUG nova.network.neutron [req-2ca045a7-7ceb-4921-a618-678a2bfa2cde req-c8c87a35-4b76-4f25-b950-6be367e06ead service nova] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1723.391150] env[63379]: DEBUG oslo_concurrency.lockutils [None req-db6f6327-ec19-47ef-bba8-47c450490ec9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.909s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1723.394337] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.114s {{(pid=63379) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1723.397605] env[63379]: INFO nova.compute.claims [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1723.423800] env[63379]: INFO nova.scheduler.client.report [None req-db6f6327-ec19-47ef-bba8-47c450490ec9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Deleted allocations for instance 758ade2c-7f75-4907-95d5-681d5792ae31 [ 1723.471986] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1723.504724] env[63379]: DEBUG nova.compute.manager [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1723.541410] env[63379]: DEBUG nova.virt.hardware [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1723.541410] env[63379]: DEBUG nova.virt.hardware [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1723.541410] env[63379]: DEBUG nova.virt.hardware [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1723.541410] env[63379]: DEBUG nova.virt.hardware [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1723.541594] env[63379]: DEBUG nova.virt.hardware [None 
req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1723.541707] env[63379]: DEBUG nova.virt.hardware [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1723.541930] env[63379]: DEBUG nova.virt.hardware [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1723.542107] env[63379]: DEBUG nova.virt.hardware [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1723.542278] env[63379]: DEBUG nova.virt.hardware [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1723.542446] env[63379]: DEBUG nova.virt.hardware [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1723.542620] env[63379]: DEBUG nova.virt.hardware [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1723.543547] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34c4c637-2cdb-459d-ac86-d85cc35e8424 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.552976] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f21ffe6f-36f4-4278-8e04-754394458fd8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.638018] env[63379]: DEBUG oslo_vmware.api [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1779881, 'name': Rename_Task, 'duration_secs': 0.161712} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1723.638018] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1723.638018] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0f7fcb9a-38c0-4d09-87c8-cbf40ad304cb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.644733] env[63379]: DEBUG oslo_vmware.api [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 1723.644733] env[63379]: value = "task-1779882" [ 1723.644733] env[63379]: _type = "Task" [ 1723.644733] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1723.656019] env[63379]: DEBUG oslo_vmware.api [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1779882, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.824652] env[63379]: DEBUG nova.network.neutron [-] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1723.861892] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-63fe060c-c1e6-4968-a854-dab01f24d45b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.871400] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06e42af0-4d4f-4104-a4b7-28c52272c3cd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.904928] env[63379]: DEBUG nova.compute.manager [req-2ca045a7-7ceb-4921-a618-678a2bfa2cde req-c8c87a35-4b76-4f25-b950-6be367e06ead service nova] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Detach interface failed, port_id=9d36760a-dfa7-4ce6-b2db-a72018c2a272, reason: Instance fad7a2dd-291f-4105-95a6-56bdbcc7acb4 could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 1723.935077] env[63379]: DEBUG oslo_concurrency.lockutils [None req-db6f6327-ec19-47ef-bba8-47c450490ec9 tempest-ServersAdminTestJSON-360986763 tempest-ServersAdminTestJSON-360986763-project-member] Lock "758ade2c-7f75-4907-95d5-681d5792ae31" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.647s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1724.158797] env[63379]: DEBUG oslo_vmware.api [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1779882, 'name': PowerOnVM_Task, 'duration_secs': 0.442685} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.159294] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1724.159766] env[63379]: INFO nova.compute.manager [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Took 7.11 seconds to spawn the instance on the hypervisor. [ 1724.160189] env[63379]: DEBUG nova.compute.manager [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1724.161238] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7296f9d1-9bf1-4a67-bc2f-ecdcfaf5a55a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.248020] env[63379]: DEBUG nova.compute.manager [req-872ad390-45dc-4435-ba5d-8a904deaca43 req-c47cc426-20b2-488c-9ff5-ad2957d263ca service nova] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Received event network-vif-plugged-1c98bd39-d74e-43ba-9a95-dcbdb4d073ab {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1724.248020] env[63379]: DEBUG oslo_concurrency.lockutils [req-872ad390-45dc-4435-ba5d-8a904deaca43 req-c47cc426-20b2-488c-9ff5-ad2957d263ca service nova] Acquiring lock "7edacb20-8472-4e9d-9408-31947d9f284e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1724.248020] env[63379]: DEBUG oslo_concurrency.lockutils [req-872ad390-45dc-4435-ba5d-8a904deaca43 req-c47cc426-20b2-488c-9ff5-ad2957d263ca service nova] Lock "7edacb20-8472-4e9d-9408-31947d9f284e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1724.248020] env[63379]: DEBUG oslo_concurrency.lockutils [req-872ad390-45dc-4435-ba5d-8a904deaca43 req-c47cc426-20b2-488c-9ff5-ad2957d263ca service nova] Lock "7edacb20-8472-4e9d-9408-31947d9f284e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1724.248020] env[63379]: DEBUG nova.compute.manager [req-872ad390-45dc-4435-ba5d-8a904deaca43 req-c47cc426-20b2-488c-9ff5-ad2957d263ca service nova] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] No waiting events found dispatching network-vif-plugged-1c98bd39-d74e-43ba-9a95-dcbdb4d073ab {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1724.248020] env[63379]: WARNING nova.compute.manager [req-872ad390-45dc-4435-ba5d-8a904deaca43 req-c47cc426-20b2-488c-9ff5-ad2957d263ca service nova] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Received 
unexpected event network-vif-plugged-1c98bd39-d74e-43ba-9a95-dcbdb4d073ab for instance with vm_state building and task_state spawning. [ 1724.328573] env[63379]: INFO nova.compute.manager [-] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Took 1.35 seconds to deallocate network for instance. [ 1724.421112] env[63379]: DEBUG nova.network.neutron [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Successfully updated port: 1c98bd39-d74e-43ba-9a95-dcbdb4d073ab {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1724.684976] env[63379]: INFO nova.compute.manager [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Took 20.93 seconds to build instance. [ 1724.736865] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84185074-302f-4d02-b873-3bcf9ea7bb71 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.745225] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35c233a1-32a4-4ef7-8a98-fa37c9399dcd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.754069] env[63379]: DEBUG oslo_concurrency.lockutils [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Acquiring lock "3e875e92-673c-4cfa-86ce-fc270ae03e94" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1724.754319] env[63379]: DEBUG oslo_concurrency.lockutils [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Lock "3e875e92-673c-4cfa-86ce-fc270ae03e94" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1724.785837] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8aab21c-9beb-4674-9576-3530d9af9bc6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.794238] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1aa4f27-806b-4854-9357-3c70a4e7ae91 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.809600] env[63379]: DEBUG nova.compute.provider_tree [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1724.836708] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cd79a38d-e171-4897-b885-d0d33043bf5d tempest-ServerDiskConfigTestJSON-1340181381 
tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1724.924187] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "refresh_cache-7edacb20-8472-4e9d-9408-31947d9f284e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1724.924428] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquired lock "refresh_cache-7edacb20-8472-4e9d-9408-31947d9f284e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1724.924550] env[63379]: DEBUG nova.network.neutron [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1725.188482] env[63379]: DEBUG oslo_concurrency.lockutils [None req-dc59e6f5-c641-4ac1-8029-00dc1e5ae7c2 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "19941838-d6b0-4fb8-9d06-f4a1b80ba428" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.443s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1725.259966] env[63379]: DEBUG nova.compute.manager [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1725.312628] env[63379]: DEBUG nova.scheduler.client.report [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1725.475230] env[63379]: DEBUG nova.network.neutron [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Instance cache missing network info. 
{{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1725.704839] env[63379]: DEBUG nova.network.neutron [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Updating instance_info_cache with network_info: [{"id": "1c98bd39-d74e-43ba-9a95-dcbdb4d073ab", "address": "fa:16:3e:8b:77:3f", "network": {"id": "f43cdd88-dc3a-4cc6-af5d-da244f472d78", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-715557899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "928a9d102f0e45b897eae72fa566c0fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23fc30ea-1f06-424d-86e1-27ae5435b1a9", "external-id": "nsx-vlan-transportzone-189", "segmentation_id": 189, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c98bd39-d7", "ovs_interfaceid": "1c98bd39-d74e-43ba-9a95-dcbdb4d073ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1725.790885] env[63379]: DEBUG oslo_concurrency.lockutils [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1725.819820] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.426s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1725.820480] env[63379]: DEBUG nova.compute.manager [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1725.827165] env[63379]: DEBUG oslo_concurrency.lockutils [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.806s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1725.830594] env[63379]: INFO nova.compute.claims [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1725.943124] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f6d71d8b-c796-4625-b1b5-7004ece5879e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquiring lock "19a41941-0679-4971-8a44-c95b13f5c294" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1725.943399] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f6d71d8b-c796-4625-b1b5-7004ece5879e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Lock "19a41941-0679-4971-8a44-c95b13f5c294" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1726.210341] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Releasing lock "refresh_cache-7edacb20-8472-4e9d-9408-31947d9f284e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1726.211217] env[63379]: DEBUG nova.compute.manager [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Instance network_info: |[{"id": "1c98bd39-d74e-43ba-9a95-dcbdb4d073ab", "address": "fa:16:3e:8b:77:3f", "network": {"id": "f43cdd88-dc3a-4cc6-af5d-da244f472d78", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-715557899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "928a9d102f0e45b897eae72fa566c0fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23fc30ea-1f06-424d-86e1-27ae5435b1a9", "external-id": "nsx-vlan-transportzone-189", "segmentation_id": 189, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c98bd39-d7", "ovs_interfaceid": "1c98bd39-d74e-43ba-9a95-dcbdb4d073ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": 
false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1726.211751] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8b:77:3f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '23fc30ea-1f06-424d-86e1-27ae5435b1a9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1c98bd39-d74e-43ba-9a95-dcbdb4d073ab', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1726.220450] env[63379]: DEBUG oslo.service.loopingcall [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1726.221239] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1726.221519] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2b7a8fe6-1bce-455e-b980-366bd0fbbdfa {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.243622] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1726.243622] env[63379]: value = "task-1779883" [ 1726.243622] env[63379]: _type = "Task" [ 1726.243622] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1726.253144] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779883, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.319814] env[63379]: DEBUG oslo_concurrency.lockutils [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Acquiring lock "ebfe6204-c7d5-4e0c-bb63-74d5755552f6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1726.319814] env[63379]: DEBUG oslo_concurrency.lockutils [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Lock "ebfe6204-c7d5-4e0c-bb63-74d5755552f6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1726.335158] env[63379]: DEBUG nova.compute.utils [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1726.338627] env[63379]: DEBUG nova.compute.manager [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1726.341026] env[63379]: DEBUG nova.network.neutron [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1726.422047] env[63379]: DEBUG nova.compute.manager [req-7e05b198-0669-4813-8e5e-209569063f46 req-0839cf85-7114-45f5-be3c-7966946c779c service nova] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Received event network-changed-1c98bd39-d74e-43ba-9a95-dcbdb4d073ab {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1726.422265] env[63379]: DEBUG nova.compute.manager [req-7e05b198-0669-4813-8e5e-209569063f46 req-0839cf85-7114-45f5-be3c-7966946c779c service nova] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Refreshing instance network info cache due to event network-changed-1c98bd39-d74e-43ba-9a95-dcbdb4d073ab. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1726.422549] env[63379]: DEBUG oslo_concurrency.lockutils [req-7e05b198-0669-4813-8e5e-209569063f46 req-0839cf85-7114-45f5-be3c-7966946c779c service nova] Acquiring lock "refresh_cache-7edacb20-8472-4e9d-9408-31947d9f284e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1726.422732] env[63379]: DEBUG oslo_concurrency.lockutils [req-7e05b198-0669-4813-8e5e-209569063f46 req-0839cf85-7114-45f5-be3c-7966946c779c service nova] Acquired lock "refresh_cache-7edacb20-8472-4e9d-9408-31947d9f284e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1726.423653] env[63379]: DEBUG nova.network.neutron [req-7e05b198-0669-4813-8e5e-209569063f46 req-0839cf85-7114-45f5-be3c-7966946c779c service nova] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Refreshing network info cache for port 1c98bd39-d74e-43ba-9a95-dcbdb4d073ab {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1726.426282] env[63379]: DEBUG nova.policy [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a52cb7db81d24a8faddcb40308665627', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '50144e7fcb0642d7a1d1514f2233f555', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1726.447954] env[63379]: DEBUG nova.compute.utils [None req-f6d71d8b-c796-4625-b1b5-7004ece5879e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1726.566707] env[63379]: DEBUG nova.compute.manager [req-b402d3a1-066a-4022-bdf8-d282fe64c761 req-8ec6655f-207a-49ae-bdce-e154ccceb39a service nova] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Received event network-changed-9d972f73-e98a-4c4d-8551-f7db527be2a7 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1726.567100] env[63379]: DEBUG nova.compute.manager [req-b402d3a1-066a-4022-bdf8-d282fe64c761 req-8ec6655f-207a-49ae-bdce-e154ccceb39a service nova] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Refreshing instance network info cache due to event network-changed-9d972f73-e98a-4c4d-8551-f7db527be2a7. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1726.567485] env[63379]: DEBUG oslo_concurrency.lockutils [req-b402d3a1-066a-4022-bdf8-d282fe64c761 req-8ec6655f-207a-49ae-bdce-e154ccceb39a service nova] Acquiring lock "refresh_cache-19941838-d6b0-4fb8-9d06-f4a1b80ba428" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1726.567485] env[63379]: DEBUG oslo_concurrency.lockutils [req-b402d3a1-066a-4022-bdf8-d282fe64c761 req-8ec6655f-207a-49ae-bdce-e154ccceb39a service nova] Acquired lock "refresh_cache-19941838-d6b0-4fb8-9d06-f4a1b80ba428" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1726.567653] env[63379]: DEBUG nova.network.neutron [req-b402d3a1-066a-4022-bdf8-d282fe64c761 req-8ec6655f-207a-49ae-bdce-e154ccceb39a service nova] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Refreshing network info cache for port 9d972f73-e98a-4c4d-8551-f7db527be2a7 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1726.755506] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779883, 'name': CreateVM_Task, 'duration_secs': 0.409739} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1726.755743] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1726.756551] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1726.756933] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1726.758033] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1726.758514] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-57606d0b-7641-4246-8ce7-ec1ca53e1f5a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.764092] env[63379]: DEBUG oslo_vmware.api [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1726.764092] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52facb0e-d49c-2d91-34bd-f62a654b4e17" [ 1726.764092] env[63379]: _type = "Task" [ 1726.764092] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1726.773273] env[63379]: DEBUG oslo_vmware.api [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52facb0e-d49c-2d91-34bd-f62a654b4e17, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.822020] env[63379]: DEBUG nova.compute.manager [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1726.838397] env[63379]: DEBUG nova.compute.manager [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1726.943077] env[63379]: DEBUG nova.network.neutron [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Successfully created port: fec55523-e298-4c52-8e45-f7a01d691c42 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1726.954706] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f6d71d8b-c796-4625-b1b5-7004ece5879e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Lock "19a41941-0679-4971-8a44-c95b13f5c294" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.009s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1727.185480] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eccfef9-6ded-4ebb-b4dc-81f0a78698b4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.195647] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40c69f55-3ddf-422c-909c-86b6a8a6a12f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.229903] env[63379]: DEBUG nova.network.neutron [req-7e05b198-0669-4813-8e5e-209569063f46 req-0839cf85-7114-45f5-be3c-7966946c779c service nova] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Updated VIF entry in instance network info cache for port 1c98bd39-d74e-43ba-9a95-dcbdb4d073ab. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1727.230817] env[63379]: DEBUG nova.network.neutron [req-7e05b198-0669-4813-8e5e-209569063f46 req-0839cf85-7114-45f5-be3c-7966946c779c service nova] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Updating instance_info_cache with network_info: [{"id": "1c98bd39-d74e-43ba-9a95-dcbdb4d073ab", "address": "fa:16:3e:8b:77:3f", "network": {"id": "f43cdd88-dc3a-4cc6-af5d-da244f472d78", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-715557899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "928a9d102f0e45b897eae72fa566c0fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23fc30ea-1f06-424d-86e1-27ae5435b1a9", "external-id": "nsx-vlan-transportzone-189", "segmentation_id": 189, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c98bd39-d7", "ovs_interfaceid": "1c98bd39-d74e-43ba-9a95-dcbdb4d073ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1727.232550] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46e84056-e505-4d5c-b6bc-5e07e401a3a6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.243790] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-441173e6-e399-4302-a335-10237dc8b1df {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.260485] env[63379]: DEBUG nova.compute.provider_tree [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1727.274303] env[63379]: DEBUG oslo_vmware.api [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52facb0e-d49c-2d91-34bd-f62a654b4e17, 'name': SearchDatastore_Task, 'duration_secs': 0.009194} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1727.276738] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1727.276986] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1727.277282] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1727.277410] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1727.277595] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1727.278190] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3c8988e3-7ed5-4c25-b24d-8b783d6e3751 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.286742] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1727.286959] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1727.287709] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2a2539d-83df-4da3-9244-1db29a336f7c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.294234] env[63379]: DEBUG oslo_vmware.api [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1727.294234] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b54ea4-c289-8145-79b6-8eabec9c2b06" [ 1727.294234] env[63379]: _type = "Task" [ 1727.294234] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1727.302578] env[63379]: DEBUG oslo_vmware.api [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b54ea4-c289-8145-79b6-8eabec9c2b06, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.351023] env[63379]: DEBUG oslo_concurrency.lockutils [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1727.389323] env[63379]: DEBUG nova.network.neutron [req-b402d3a1-066a-4022-bdf8-d282fe64c761 req-8ec6655f-207a-49ae-bdce-e154ccceb39a service nova] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Updated VIF entry in instance network info cache for port 9d972f73-e98a-4c4d-8551-f7db527be2a7. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1727.389807] env[63379]: DEBUG nova.network.neutron [req-b402d3a1-066a-4022-bdf8-d282fe64c761 req-8ec6655f-207a-49ae-bdce-e154ccceb39a service nova] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Updating instance_info_cache with network_info: [{"id": "9d972f73-e98a-4c4d-8551-f7db527be2a7", "address": "fa:16:3e:c4:ee:85", "network": {"id": "2c6cbb4b-63db-4c84-91d3-63d6f68cfb71", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-740697972-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.178", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba1a1cf17f9941b299a6102689835f88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1195acd-707f-4bac-a99d-14db17a63802", "external-id": "nsx-vlan-transportzone-322", "segmentation_id": 322, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d972f73-e9", "ovs_interfaceid": "9d972f73-e98a-4c4d-8551-f7db527be2a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1727.737731] env[63379]: DEBUG oslo_concurrency.lockutils [req-7e05b198-0669-4813-8e5e-209569063f46 req-0839cf85-7114-45f5-be3c-7966946c779c service nova] Releasing lock "refresh_cache-7edacb20-8472-4e9d-9408-31947d9f284e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1727.762902] env[63379]: DEBUG nova.scheduler.client.report [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1727.809370] env[63379]: DEBUG oslo_vmware.api [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b54ea4-c289-8145-79b6-8eabec9c2b06, 'name': SearchDatastore_Task, 'duration_secs': 0.010422} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1727.809370] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4bfbe86-5f23-417c-b6c9-6caa1aac4bf0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.821346] env[63379]: DEBUG oslo_vmware.api [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1727.821346] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a472e6-9e10-463f-75b1-a0e16087ea6f" [ 1727.821346] env[63379]: _type = "Task" [ 1727.821346] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1727.831828] env[63379]: DEBUG oslo_vmware.api [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a472e6-9e10-463f-75b1-a0e16087ea6f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.852022] env[63379]: DEBUG nova.compute.manager [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1727.884992] env[63379]: DEBUG nova.virt.hardware [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1727.886070] env[63379]: DEBUG nova.virt.hardware [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1727.886070] env[63379]: DEBUG nova.virt.hardware [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1727.886070] env[63379]: DEBUG nova.virt.hardware 
[None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1727.886070] env[63379]: DEBUG nova.virt.hardware [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1727.886070] env[63379]: DEBUG nova.virt.hardware [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1727.886276] env[63379]: DEBUG nova.virt.hardware [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1727.886539] env[63379]: DEBUG nova.virt.hardware [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1727.886698] env[63379]: DEBUG nova.virt.hardware [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1727.886885] env[63379]: DEBUG nova.virt.hardware [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1727.887265] env[63379]: DEBUG nova.virt.hardware [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1727.888209] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46431388-d8d8-4d1f-b598-a02ec9d987bb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.892690] env[63379]: DEBUG oslo_concurrency.lockutils [req-b402d3a1-066a-4022-bdf8-d282fe64c761 req-8ec6655f-207a-49ae-bdce-e154ccceb39a service nova] Releasing lock "refresh_cache-19941838-d6b0-4fb8-9d06-f4a1b80ba428" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1727.896219] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-2124fa58-0529-4e20-9954-134ec163e932 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.053929] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f6d71d8b-c796-4625-b1b5-7004ece5879e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquiring lock "19a41941-0679-4971-8a44-c95b13f5c294" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1728.054209] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f6d71d8b-c796-4625-b1b5-7004ece5879e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Lock "19a41941-0679-4971-8a44-c95b13f5c294" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1728.054456] env[63379]: INFO nova.compute.manager [None req-f6d71d8b-c796-4625-b1b5-7004ece5879e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Attaching volume 9d889203-dc27-4007-a5c2-f62dd5709f2f to /dev/sdb [ 1728.097883] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c0879a5-d8d5-4747-97ad-dd507fed05c0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.105153] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5d40aa9-2036-4199-bf23-c954d2249921 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.117926] env[63379]: DEBUG nova.virt.block_device [None req-f6d71d8b-c796-4625-b1b5-7004ece5879e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Updating existing volume attachment record: 1869c442-4e3c-4e71-aca9-58a4d257b1e0 {{(pid=63379) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1728.268381] env[63379]: DEBUG oslo_concurrency.lockutils [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.441s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1728.268952] env[63379]: DEBUG nova.compute.manager [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1728.271551] env[63379]: DEBUG oslo_concurrency.lockutils [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.034s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1728.273187] env[63379]: INFO nova.compute.claims [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1728.332027] env[63379]: DEBUG oslo_vmware.api [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a472e6-9e10-463f-75b1-a0e16087ea6f, 'name': SearchDatastore_Task, 'duration_secs': 0.010479} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1728.332368] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1728.332679] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 7edacb20-8472-4e9d-9408-31947d9f284e/7edacb20-8472-4e9d-9408-31947d9f284e.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1728.332984] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7dcd5ae6-9162-41ed-af4d-700cff99a851 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.339310] env[63379]: DEBUG oslo_vmware.api [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1728.339310] env[63379]: value = "task-1779885" [ 1728.339310] env[63379]: _type = "Task" [ 1728.339310] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1728.347769] env[63379]: DEBUG oslo_vmware.api [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779885, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.725206] env[63379]: DEBUG nova.compute.manager [req-d25f8d2d-a824-4164-bfdf-4d11fcff0a38 req-ab771631-b62a-4b53-8418-019b4072c737 service nova] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Received event network-vif-plugged-fec55523-e298-4c52-8e45-f7a01d691c42 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1728.725698] env[63379]: DEBUG oslo_concurrency.lockutils [req-d25f8d2d-a824-4164-bfdf-4d11fcff0a38 req-ab771631-b62a-4b53-8418-019b4072c737 service nova] Acquiring lock "6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1728.728562] env[63379]: DEBUG oslo_concurrency.lockutils [req-d25f8d2d-a824-4164-bfdf-4d11fcff0a38 req-ab771631-b62a-4b53-8418-019b4072c737 service nova] Lock "6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1728.728562] env[63379]: DEBUG oslo_concurrency.lockutils [req-d25f8d2d-a824-4164-bfdf-4d11fcff0a38 req-ab771631-b62a-4b53-8418-019b4072c737 service nova] Lock "6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1728.728562] env[63379]: DEBUG nova.compute.manager [req-d25f8d2d-a824-4164-bfdf-4d11fcff0a38 req-ab771631-b62a-4b53-8418-019b4072c737 service nova] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] No waiting events found dispatching network-vif-plugged-fec55523-e298-4c52-8e45-f7a01d691c42 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1728.728562] env[63379]: WARNING nova.compute.manager [req-d25f8d2d-a824-4164-bfdf-4d11fcff0a38 req-ab771631-b62a-4b53-8418-019b4072c737 service nova] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Received unexpected event network-vif-plugged-fec55523-e298-4c52-8e45-f7a01d691c42 for instance with vm_state building and task_state spawning. [ 1728.780057] env[63379]: DEBUG nova.compute.utils [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1728.784432] env[63379]: DEBUG nova.compute.manager [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1728.785179] env[63379]: DEBUG nova.network.neutron [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1728.846581] env[63379]: DEBUG nova.policy [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6b34522806e0473bb0bfd20aa9a27a06', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '26e96d98928449efaf2999f78cd52fac', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1728.854428] env[63379]: DEBUG oslo_vmware.api [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779885, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.867066] env[63379]: DEBUG nova.network.neutron [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Successfully updated port: fec55523-e298-4c52-8e45-f7a01d691c42 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1729.179554] env[63379]: DEBUG nova.network.neutron [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Successfully created port: 1f122953-4fde-41ae-9895-0ef67cacb236 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1729.285118] env[63379]: DEBUG nova.compute.manager [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1729.353133] env[63379]: DEBUG oslo_vmware.api [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779885, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.51431} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1729.353531] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 7edacb20-8472-4e9d-9408-31947d9f284e/7edacb20-8472-4e9d-9408-31947d9f284e.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1729.353857] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1729.354164] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-95a00f61-c82c-4166-86c1-65eabc5bdb19 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.364220] env[63379]: DEBUG oslo_vmware.api [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1729.364220] env[63379]: value = "task-1779888" [ 1729.364220] env[63379]: _type = "Task" [ 1729.364220] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1729.371118] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Acquiring lock "refresh_cache-6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1729.371217] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Acquired lock "refresh_cache-6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1729.371392] env[63379]: DEBUG nova.network.neutron [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1729.376123] env[63379]: DEBUG oslo_vmware.api [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779888, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.553674] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff25d99b-b679-4c8a-8d21-149499adc828 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.561870] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2f8d3fb-4c0a-4d85-8995-bc1e19898cb5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.593494] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-003ca16d-86f5-49c1-838f-97dfbe698e22 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.601643] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b74994f-dfd2-4f7e-89fe-c156c014b19c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.617911] env[63379]: DEBUG nova.compute.provider_tree [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1729.874856] env[63379]: DEBUG oslo_vmware.api [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779888, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068118} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1729.875159] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1729.875978] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f277d035-6794-4960-a4c9-51b96fe257d9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.902094] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Reconfiguring VM instance instance-00000050 to attach disk [datastore1] 7edacb20-8472-4e9d-9408-31947d9f284e/7edacb20-8472-4e9d-9408-31947d9f284e.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1729.902411] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-acdd4f27-260a-40a7-aa58-ea2c8a0cf301 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.923816] env[63379]: DEBUG oslo_vmware.api [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1729.923816] env[63379]: value = "task-1779889" [ 1729.923816] env[63379]: _type = "Task" [ 1729.923816] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1729.940132] env[63379]: DEBUG oslo_vmware.api [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779889, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.941083] env[63379]: DEBUG nova.network.neutron [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Instance cache missing network info. 
{{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1730.120365] env[63379]: DEBUG nova.scheduler.client.report [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1730.294839] env[63379]: DEBUG nova.compute.manager [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1730.310499] env[63379]: DEBUG nova.network.neutron [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Updating instance_info_cache with network_info: [{"id": "fec55523-e298-4c52-8e45-f7a01d691c42", "address": "fa:16:3e:f2:76:61", "network": {"id": "393f09d9-160a-48f1-acdf-cf2e43117ed7", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-834514240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "50144e7fcb0642d7a1d1514f2233f555", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfec55523-e2", "ovs_interfaceid": "fec55523-e298-4c52-8e45-f7a01d691c42", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1730.321464] env[63379]: DEBUG nova.virt.hardware [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1730.321718] env[63379]: DEBUG nova.virt.hardware [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1730.321965] env[63379]: DEBUG nova.virt.hardware [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1730.322227] env[63379]: DEBUG nova.virt.hardware [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1730.322389] env[63379]: DEBUG nova.virt.hardware [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1730.322547] env[63379]: DEBUG nova.virt.hardware [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1730.322763] env[63379]: DEBUG nova.virt.hardware [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1730.322955] env[63379]: DEBUG nova.virt.hardware [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1730.323271] env[63379]: DEBUG nova.virt.hardware [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1730.323481] env[63379]: DEBUG nova.virt.hardware [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:575}} [ 1730.323668] env[63379]: DEBUG nova.virt.hardware [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1730.324653] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74c90c6a-1816-41b3-80f1-7e5f641e2f35 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.334211] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2766863b-915f-4337-89ec-db0411a54141 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.435365] env[63379]: DEBUG oslo_vmware.api [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779889, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1730.631836] env[63379]: DEBUG oslo_concurrency.lockutils [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.360s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1730.632538] env[63379]: DEBUG nova.compute.manager [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1730.635672] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.067s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1730.637417] env[63379]: INFO nova.compute.claims [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1730.723175] env[63379]: DEBUG nova.compute.manager [req-a10ded44-5ab9-4fd4-984e-83087db01e1e req-b757b4c2-6acf-4b22-810e-7e770e24c452 service nova] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Received event network-vif-plugged-1f122953-4fde-41ae-9895-0ef67cacb236 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1730.723426] env[63379]: DEBUG oslo_concurrency.lockutils [req-a10ded44-5ab9-4fd4-984e-83087db01e1e req-b757b4c2-6acf-4b22-810e-7e770e24c452 service nova] Acquiring lock "c900bb90-b4a8-40a2-9436-5a0ced1dd919-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1730.723650] env[63379]: DEBUG oslo_concurrency.lockutils [req-a10ded44-5ab9-4fd4-984e-83087db01e1e req-b757b4c2-6acf-4b22-810e-7e770e24c452 service nova] Lock "c900bb90-b4a8-40a2-9436-5a0ced1dd919-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1730.723823] env[63379]: DEBUG oslo_concurrency.lockutils [req-a10ded44-5ab9-4fd4-984e-83087db01e1e req-b757b4c2-6acf-4b22-810e-7e770e24c452 service nova] Lock "c900bb90-b4a8-40a2-9436-5a0ced1dd919-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1730.724014] env[63379]: DEBUG nova.compute.manager [req-a10ded44-5ab9-4fd4-984e-83087db01e1e req-b757b4c2-6acf-4b22-810e-7e770e24c452 service nova] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] No waiting events found dispatching network-vif-plugged-1f122953-4fde-41ae-9895-0ef67cacb236 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1730.724349] env[63379]: WARNING nova.compute.manager [req-a10ded44-5ab9-4fd4-984e-83087db01e1e req-b757b4c2-6acf-4b22-810e-7e770e24c452 service nova] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Received unexpected event network-vif-plugged-1f122953-4fde-41ae-9895-0ef67cacb236 for instance with vm_state building and task_state spawning. 
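The "<uuid>-events" lock acquire/release pairs and the "No waiting events found dispatching network-vif-plugged-..." / "Received unexpected event ..." entries above reflect how nova-compute serializes externally delivered Neutron events per instance: the handler takes a per-instance lock, pops any registered waiter for that event, and simply warns when the event arrives before anything is waiting (as happens here while the VM is still building). A minimal sketch of that lock/pop pattern follows; the class and method names (InstanceEventRegistry, pop_event, dispatch) are illustrative stand-ins, not Nova's actual implementation, and only oslo_concurrency.lockutils.lock is a real API.

from oslo_concurrency import lockutils


class InstanceEventRegistry:
    """Illustrative only: tracks waiters for external events such as
    network-vif-plugged; not Nova's actual InstanceEvents class."""

    def __init__(self):
        # instance uuid -> {event name: callback}
        self._waiters = {}

    def pop_event(self, instance_uuid, event_name):
        # Serialize per instance, like the "<uuid>-events" lock
        # acquire/release pairs in the entries above.
        with lockutils.lock(instance_uuid + '-events'):
            return self._waiters.get(instance_uuid, {}).pop(event_name, None)

    def dispatch(self, instance_uuid, event_name):
        callback = self.pop_event(instance_uuid, event_name)
        if callback is None:
            # Nothing is waiting yet: the compute manager logs the
            # "Received unexpected event ..." WARNING seen above and drops it.
            return False
        callback(event_name)
        return True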
[ 1730.757732] env[63379]: DEBUG nova.compute.manager [req-377c56b1-fbb0-4582-826b-36ff169356c4 req-3ac5bf18-f3f0-43b7-80a6-4d639234b262 service nova] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Received event network-changed-fec55523-e298-4c52-8e45-f7a01d691c42 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1730.758272] env[63379]: DEBUG nova.compute.manager [req-377c56b1-fbb0-4582-826b-36ff169356c4 req-3ac5bf18-f3f0-43b7-80a6-4d639234b262 service nova] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Refreshing instance network info cache due to event network-changed-fec55523-e298-4c52-8e45-f7a01d691c42. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1730.758384] env[63379]: DEBUG oslo_concurrency.lockutils [req-377c56b1-fbb0-4582-826b-36ff169356c4 req-3ac5bf18-f3f0-43b7-80a6-4d639234b262 service nova] Acquiring lock "refresh_cache-6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1730.813360] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Releasing lock "refresh_cache-6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1730.814607] env[63379]: DEBUG nova.compute.manager [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Instance network_info: |[{"id": "fec55523-e298-4c52-8e45-f7a01d691c42", "address": "fa:16:3e:f2:76:61", "network": {"id": "393f09d9-160a-48f1-acdf-cf2e43117ed7", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-834514240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "50144e7fcb0642d7a1d1514f2233f555", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfec55523-e2", "ovs_interfaceid": "fec55523-e298-4c52-8e45-f7a01d691c42", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1730.814607] env[63379]: DEBUG oslo_concurrency.lockutils [req-377c56b1-fbb0-4582-826b-36ff169356c4 req-3ac5bf18-f3f0-43b7-80a6-4d639234b262 service nova] Acquired lock "refresh_cache-6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1730.814607] env[63379]: DEBUG nova.network.neutron [req-377c56b1-fbb0-4582-826b-36ff169356c4 req-3ac5bf18-f3f0-43b7-80a6-4d639234b262 service nova] [instance: 
6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Refreshing network info cache for port fec55523-e298-4c52-8e45-f7a01d691c42 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1730.815864] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f2:76:61', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b80dd748-3d7e-4a23-a38d-9e79a3881452', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fec55523-e298-4c52-8e45-f7a01d691c42', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1730.830296] env[63379]: DEBUG oslo.service.loopingcall [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1730.831400] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1730.835366] env[63379]: DEBUG nova.network.neutron [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Successfully updated port: 1f122953-4fde-41ae-9895-0ef67cacb236 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1730.835366] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-49ad492c-0312-4b5a-8c8f-77a09f89fbe5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.861771] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1730.861771] env[63379]: value = "task-1779891" [ 1730.861771] env[63379]: _type = "Task" [ 1730.861771] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1730.870954] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779891, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1730.936033] env[63379]: DEBUG oslo_vmware.api [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779889, 'name': ReconfigVM_Task, 'duration_secs': 0.820688} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1730.936360] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Reconfigured VM instance instance-00000050 to attach disk [datastore1] 7edacb20-8472-4e9d-9408-31947d9f284e/7edacb20-8472-4e9d-9408-31947d9f284e.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1730.937094] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d2bc9c41-b8ca-435d-b982-fcd0815dd35c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.945277] env[63379]: DEBUG oslo_vmware.api [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1730.945277] env[63379]: value = "task-1779892" [ 1730.945277] env[63379]: _type = "Task" [ 1730.945277] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1730.953723] env[63379]: DEBUG oslo_vmware.api [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779892, 'name': Rename_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.142628] env[63379]: DEBUG nova.compute.utils [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1731.146702] env[63379]: DEBUG nova.compute.manager [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1731.146893] env[63379]: DEBUG nova.network.neutron [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1731.194845] env[63379]: DEBUG nova.policy [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6b34522806e0473bb0bfd20aa9a27a06', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '26e96d98928449efaf2999f78cd52fac', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1731.335362] env[63379]: DEBUG oslo_concurrency.lockutils [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Acquiring lock "refresh_cache-c900bb90-b4a8-40a2-9436-5a0ced1dd919" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1731.335675] env[63379]: DEBUG oslo_concurrency.lockutils [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Acquired lock "refresh_cache-c900bb90-b4a8-40a2-9436-5a0ced1dd919" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1731.335675] env[63379]: DEBUG nova.network.neutron [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1731.373346] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779891, 'name': CreateVM_Task, 'duration_secs': 0.440331} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1731.373553] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1731.374200] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1731.374367] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1731.374803] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1731.375094] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f8f1db2-5b14-4df5-aec3-faf1583fbb5f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.380835] env[63379]: DEBUG oslo_vmware.api [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Waiting for the task: (returnval){ [ 1731.380835] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]521e6b5c-841f-773b-046e-613b9572c46d" [ 1731.380835] env[63379]: _type = "Task" [ 1731.380835] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1731.391501] env[63379]: DEBUG oslo_vmware.api [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]521e6b5c-841f-773b-046e-613b9572c46d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.456423] env[63379]: DEBUG oslo_vmware.api [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779892, 'name': Rename_Task, 'duration_secs': 0.152548} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1731.456707] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1731.456965] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-eff101cc-7d60-4658-af54-ccd4d1e212a0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.464536] env[63379]: DEBUG oslo_vmware.api [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1731.464536] env[63379]: value = "task-1779893" [ 1731.464536] env[63379]: _type = "Task" [ 1731.464536] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1731.465677] env[63379]: DEBUG nova.network.neutron [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Successfully created port: 04979812-3eaa-4eb6-9a6f-306c8544d56b {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1731.483046] env[63379]: DEBUG oslo_vmware.api [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779893, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.651070] env[63379]: DEBUG nova.compute.manager [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1731.718461] env[63379]: DEBUG nova.network.neutron [req-377c56b1-fbb0-4582-826b-36ff169356c4 req-3ac5bf18-f3f0-43b7-80a6-4d639234b262 service nova] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Updated VIF entry in instance network info cache for port fec55523-e298-4c52-8e45-f7a01d691c42. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1731.718882] env[63379]: DEBUG nova.network.neutron [req-377c56b1-fbb0-4582-826b-36ff169356c4 req-3ac5bf18-f3f0-43b7-80a6-4d639234b262 service nova] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Updating instance_info_cache with network_info: [{"id": "fec55523-e298-4c52-8e45-f7a01d691c42", "address": "fa:16:3e:f2:76:61", "network": {"id": "393f09d9-160a-48f1-acdf-cf2e43117ed7", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-834514240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "50144e7fcb0642d7a1d1514f2233f555", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfec55523-e2", "ovs_interfaceid": "fec55523-e298-4c52-8e45-f7a01d691c42", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1731.870584] env[63379]: DEBUG nova.network.neutron [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1731.899997] env[63379]: DEBUG oslo_vmware.api [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]521e6b5c-841f-773b-046e-613b9572c46d, 'name': SearchDatastore_Task, 'duration_secs': 0.010066} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1731.900162] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1731.900370] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1731.900590] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1731.900732] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1731.901329] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1731.901329] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8c623da3-4974-4d14-8d7e-30b9de7d1ca3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.924704] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1731.924998] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1731.926165] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15cbae33-5a13-4c60-b8e0-404937df0626 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.933454] env[63379]: DEBUG oslo_vmware.api [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Waiting for the task: (returnval){ [ 1731.933454] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]524c580c-784f-16c0-305d-8d18a9ffd454" [ 1731.933454] env[63379]: _type = "Task" [ 1731.933454] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1731.942605] env[63379]: DEBUG oslo_vmware.api [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]524c580c-784f-16c0-305d-8d18a9ffd454, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.983839] env[63379]: DEBUG oslo_vmware.api [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779893, 'name': PowerOnVM_Task, 'duration_secs': 0.488027} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1731.983839] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1731.983839] env[63379]: INFO nova.compute.manager [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Took 8.48 seconds to spawn the instance on the hypervisor. 
[ 1731.983839] env[63379]: DEBUG nova.compute.manager [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1731.983839] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc78628f-86de-4342-ae02-6ab311015fe0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.997508] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ac5ad87-f97c-4d90-882c-f9771cf39b72 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.008296] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b57e3de8-9580-4e7b-9790-35304292f41b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.039480] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3eee049-42c3-44d8-9e5b-80a13e8347cf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.048937] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ac67e69-1414-4b57-b7d4-7ce8bd8f5ee4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.063651] env[63379]: DEBUG nova.compute.provider_tree [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1732.089421] env[63379]: DEBUG nova.network.neutron [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Updating instance_info_cache with network_info: [{"id": "1f122953-4fde-41ae-9895-0ef67cacb236", "address": "fa:16:3e:fc:b1:40", "network": {"id": "21aec006-8ff0-453c-a492-04a7c2a2a4a9", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-29133767-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "26e96d98928449efaf2999f78cd52fac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91c1da19-ab68-4127-bacd-accbaff19651", "external-id": "nsx-vlan-transportzone-319", "segmentation_id": 319, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f122953-4f", "ovs_interfaceid": "1f122953-4fde-41ae-9895-0ef67cacb236", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1732.225388] env[63379]: DEBUG oslo_concurrency.lockutils [req-377c56b1-fbb0-4582-826b-36ff169356c4 req-3ac5bf18-f3f0-43b7-80a6-4d639234b262 service nova] Releasing lock "refresh_cache-6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1732.447083] env[63379]: DEBUG oslo_vmware.api [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]524c580c-784f-16c0-305d-8d18a9ffd454, 'name': SearchDatastore_Task, 'duration_secs': 0.010605} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1732.447924] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46abad6d-ac29-4961-a988-aac50fcc01e5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.453334] env[63379]: DEBUG oslo_vmware.api [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Waiting for the task: (returnval){ [ 1732.453334] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]529e8e78-80a1-dd6f-3011-7dc4cca5478f" [ 1732.453334] env[63379]: _type = "Task" [ 1732.453334] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1732.461662] env[63379]: DEBUG oslo_vmware.api [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]529e8e78-80a1-dd6f-3011-7dc4cca5478f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.513055] env[63379]: INFO nova.compute.manager [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Took 22.28 seconds to build instance. 
[ 1732.570036] env[63379]: DEBUG nova.scheduler.client.report [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1732.592698] env[63379]: DEBUG oslo_concurrency.lockutils [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Releasing lock "refresh_cache-c900bb90-b4a8-40a2-9436-5a0ced1dd919" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1732.592698] env[63379]: DEBUG nova.compute.manager [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Instance network_info: |[{"id": "1f122953-4fde-41ae-9895-0ef67cacb236", "address": "fa:16:3e:fc:b1:40", "network": {"id": "21aec006-8ff0-453c-a492-04a7c2a2a4a9", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-29133767-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "26e96d98928449efaf2999f78cd52fac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91c1da19-ab68-4127-bacd-accbaff19651", "external-id": "nsx-vlan-transportzone-319", "segmentation_id": 319, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f122953-4f", "ovs_interfaceid": "1f122953-4fde-41ae-9895-0ef67cacb236", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1732.592698] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fc:b1:40', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '91c1da19-ab68-4127-bacd-accbaff19651', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1f122953-4fde-41ae-9895-0ef67cacb236', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1732.602060] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 
tempest-ListServerFiltersTestJSON-1740909441-project-member] Creating folder: Project (26e96d98928449efaf2999f78cd52fac). Parent ref: group-v369214. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1732.602879] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fb3f170a-b249-4445-8d24-6468363593e7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.614666] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Created folder: Project (26e96d98928449efaf2999f78cd52fac) in parent group-v369214. [ 1732.614896] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Creating folder: Instances. Parent ref: group-v369443. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1732.615139] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2abb0e12-7cf2-46c1-acf3-6758b6d6c4ec {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.624351] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Created folder: Instances in parent group-v369443. [ 1732.624616] env[63379]: DEBUG oslo.service.loopingcall [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1732.624837] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1732.625017] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a27e0436-e1f6-4f84-9033-5d7743520b26 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.646394] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1732.646394] env[63379]: value = "task-1779896" [ 1732.646394] env[63379]: _type = "Task" [ 1732.646394] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1732.653421] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779896, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.669780] env[63379]: DEBUG nova.compute.manager [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1732.703558] env[63379]: DEBUG nova.virt.hardware [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1732.703834] env[63379]: DEBUG nova.virt.hardware [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1732.703992] env[63379]: DEBUG nova.virt.hardware [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1732.704188] env[63379]: DEBUG nova.virt.hardware [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1732.704335] env[63379]: DEBUG nova.virt.hardware [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1732.704483] env[63379]: DEBUG nova.virt.hardware [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1732.704693] env[63379]: DEBUG nova.virt.hardware [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1732.704964] env[63379]: DEBUG nova.virt.hardware [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} 
[ 1732.705209] env[63379]: DEBUG nova.virt.hardware [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1732.705386] env[63379]: DEBUG nova.virt.hardware [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1732.706509] env[63379]: DEBUG nova.virt.hardware [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1732.709030] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca515aec-26f5-454c-ba6a-d2414e18a1d0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.719503] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e94c4ae-510c-465b-9e25-eae0d6b972b0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.758399] env[63379]: DEBUG nova.compute.manager [req-d406a576-1d48-4722-b0db-bb880dcc1281 req-275cbe37-0b9b-4bd6-a2f0-0790e9c62be9 service nova] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Received event network-changed-1f122953-4fde-41ae-9895-0ef67cacb236 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1732.758615] env[63379]: DEBUG nova.compute.manager [req-d406a576-1d48-4722-b0db-bb880dcc1281 req-275cbe37-0b9b-4bd6-a2f0-0790e9c62be9 service nova] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Refreshing instance network info cache due to event network-changed-1f122953-4fde-41ae-9895-0ef67cacb236. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1732.758857] env[63379]: DEBUG oslo_concurrency.lockutils [req-d406a576-1d48-4722-b0db-bb880dcc1281 req-275cbe37-0b9b-4bd6-a2f0-0790e9c62be9 service nova] Acquiring lock "refresh_cache-c900bb90-b4a8-40a2-9436-5a0ced1dd919" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1732.758984] env[63379]: DEBUG oslo_concurrency.lockutils [req-d406a576-1d48-4722-b0db-bb880dcc1281 req-275cbe37-0b9b-4bd6-a2f0-0790e9c62be9 service nova] Acquired lock "refresh_cache-c900bb90-b4a8-40a2-9436-5a0ced1dd919" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1732.759166] env[63379]: DEBUG nova.network.neutron [req-d406a576-1d48-4722-b0db-bb880dcc1281 req-275cbe37-0b9b-4bd6-a2f0-0790e9c62be9 service nova] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Refreshing network info cache for port 1f122953-4fde-41ae-9895-0ef67cacb236 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1732.964431] env[63379]: DEBUG oslo_vmware.api [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]529e8e78-80a1-dd6f-3011-7dc4cca5478f, 'name': SearchDatastore_Task, 'duration_secs': 0.034928} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1732.964801] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1732.965131] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf/6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1732.965412] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1bdc98a8-4bae-4949-a6b9-8eed6bf68dd6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.974375] env[63379]: DEBUG oslo_vmware.api [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Waiting for the task: (returnval){ [ 1732.974375] env[63379]: value = "task-1779897" [ 1732.974375] env[63379]: _type = "Task" [ 1732.974375] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1732.983085] env[63379]: DEBUG oslo_vmware.api [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779897, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.022034] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0b2601ad-bbf9-42b8-b92a-6ff7c7790d43 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "7edacb20-8472-4e9d-9408-31947d9f284e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.796s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1733.024600] env[63379]: DEBUG nova.network.neutron [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Successfully updated port: 04979812-3eaa-4eb6-9a6f-306c8544d56b {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1733.076040] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.440s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1733.076664] env[63379]: DEBUG nova.compute.manager [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1733.082188] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bbbcce13-e930-473b-b06b-25608ee4ac40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.276s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1733.082255] env[63379]: DEBUG nova.objects.instance [None req-bbbcce13-e930-473b-b06b-25608ee4ac40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Lazy-loading 'resources' on Instance uuid 5c4ae6c6-538a-4724-ad77-340d9c60c24a {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1733.083396] env[63379]: DEBUG oslo_concurrency.lockutils [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "7edacb20-8472-4e9d-9408-31947d9f284e" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1733.083618] env[63379]: DEBUG oslo_concurrency.lockutils [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "7edacb20-8472-4e9d-9408-31947d9f284e" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1733.083825] env[63379]: INFO nova.compute.manager [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Shelving [ 1733.154886] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779896, 'name': CreateVM_Task, 'duration_secs': 0.508252} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1733.155073] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1733.155776] env[63379]: DEBUG oslo_concurrency.lockutils [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1733.155950] env[63379]: DEBUG oslo_concurrency.lockutils [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1733.156310] env[63379]: DEBUG oslo_concurrency.lockutils [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1733.156599] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16e732e8-31bf-4d05-ba84-8c8edd881a51 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.161476] env[63379]: DEBUG oslo_vmware.api [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Waiting for the task: (returnval){ [ 1733.161476] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e3d76a-308b-af21-5a3c-b2b0c31f53f2" [ 1733.161476] env[63379]: _type = "Task" [ 1733.161476] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1733.166316] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6d71d8b-c796-4625-b1b5-7004ece5879e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Volume attach. 
Driver type: vmdk {{(pid=63379) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1733.166581] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6d71d8b-c796-4625-b1b5-7004ece5879e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369441', 'volume_id': '9d889203-dc27-4007-a5c2-f62dd5709f2f', 'name': 'volume-9d889203-dc27-4007-a5c2-f62dd5709f2f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '19a41941-0679-4971-8a44-c95b13f5c294', 'attached_at': '', 'detached_at': '', 'volume_id': '9d889203-dc27-4007-a5c2-f62dd5709f2f', 'serial': '9d889203-dc27-4007-a5c2-f62dd5709f2f'} {{(pid=63379) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1733.167333] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df0dead0-e496-44a5-aca7-aac13ee077c5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.172679] env[63379]: DEBUG oslo_vmware.api [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e3d76a-308b-af21-5a3c-b2b0c31f53f2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.186023] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab167ac7-52fc-468f-9eb8-e99d868106a4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.210833] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6d71d8b-c796-4625-b1b5-7004ece5879e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Reconfiguring VM instance instance-0000003e to attach disk [datastore1] volume-9d889203-dc27-4007-a5c2-f62dd5709f2f/volume-9d889203-dc27-4007-a5c2-f62dd5709f2f.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1733.211210] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-71d28863-375c-468c-8fd3-df305811dacd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.232625] env[63379]: DEBUG oslo_vmware.api [None req-f6d71d8b-c796-4625-b1b5-7004ece5879e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1733.232625] env[63379]: value = "task-1779898" [ 1733.232625] env[63379]: _type = "Task" [ 1733.232625] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1733.244541] env[63379]: DEBUG oslo_vmware.api [None req-f6d71d8b-c796-4625-b1b5-7004ece5879e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779898, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.485831] env[63379]: DEBUG oslo_vmware.api [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779897, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.528051] env[63379]: DEBUG oslo_concurrency.lockutils [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Acquiring lock "refresh_cache-b9bc2562-9475-400e-9cf9-646b8f4c8cf2" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1733.528426] env[63379]: DEBUG oslo_concurrency.lockutils [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Acquired lock "refresh_cache-b9bc2562-9475-400e-9cf9-646b8f4c8cf2" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1733.528426] env[63379]: DEBUG nova.network.neutron [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1733.532287] env[63379]: DEBUG nova.network.neutron [req-d406a576-1d48-4722-b0db-bb880dcc1281 req-275cbe37-0b9b-4bd6-a2f0-0790e9c62be9 service nova] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Updated VIF entry in instance network info cache for port 1f122953-4fde-41ae-9895-0ef67cacb236. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1733.532654] env[63379]: DEBUG nova.network.neutron [req-d406a576-1d48-4722-b0db-bb880dcc1281 req-275cbe37-0b9b-4bd6-a2f0-0790e9c62be9 service nova] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Updating instance_info_cache with network_info: [{"id": "1f122953-4fde-41ae-9895-0ef67cacb236", "address": "fa:16:3e:fc:b1:40", "network": {"id": "21aec006-8ff0-453c-a492-04a7c2a2a4a9", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-29133767-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "26e96d98928449efaf2999f78cd52fac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91c1da19-ab68-4127-bacd-accbaff19651", "external-id": "nsx-vlan-transportzone-319", "segmentation_id": 319, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f122953-4f", "ovs_interfaceid": "1f122953-4fde-41ae-9895-0ef67cacb236", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1733.583405] env[63379]: DEBUG nova.compute.utils [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1733.585277] env[63379]: DEBUG nova.compute.manager [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1733.585463] env[63379]: DEBUG nova.network.neutron [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1733.594127] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1733.594634] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1cd88dc5-17cf-4c50-97f7-83f68fae6840 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.603244] env[63379]: DEBUG oslo_vmware.api [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1733.603244] env[63379]: value = "task-1779899" [ 1733.603244] env[63379]: _type = "Task" [ 1733.603244] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1733.615507] env[63379]: DEBUG oslo_vmware.api [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779899, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.632068] env[63379]: DEBUG nova.policy [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6b34522806e0473bb0bfd20aa9a27a06', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '26e96d98928449efaf2999f78cd52fac', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1733.675676] env[63379]: DEBUG oslo_vmware.api [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e3d76a-308b-af21-5a3c-b2b0c31f53f2, 'name': SearchDatastore_Task, 'duration_secs': 0.009017} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1733.676053] env[63379]: DEBUG oslo_concurrency.lockutils [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1733.676554] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1733.676811] env[63379]: DEBUG oslo_concurrency.lockutils [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1733.677031] env[63379]: DEBUG oslo_concurrency.lockutils [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1733.677247] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1733.677706] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-80d4f0cb-025a-4f0a-840f-2c4e23866891 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.687332] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1733.687410] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1733.688305] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f918fb4-2978-4d7a-af1e-5d3ceac5c423 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.696097] env[63379]: DEBUG oslo_vmware.api [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Waiting for the task: (returnval){ [ 1733.696097] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52ef7877-832b-2697-e4ce-c18258c62c1f" [ 1733.696097] env[63379]: _type = "Task" [ 1733.696097] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1733.707180] env[63379]: DEBUG oslo_vmware.api [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52ef7877-832b-2697-e4ce-c18258c62c1f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.747541] env[63379]: DEBUG oslo_vmware.api [None req-f6d71d8b-c796-4625-b1b5-7004ece5879e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779898, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.880371] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "4b419aa8-d4da-45fd-a6da-6f05ee851f2f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1733.880809] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "4b419aa8-d4da-45fd-a6da-6f05ee851f2f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1733.936017] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b95f3857-1a51-4d00-93c1-a71802299917 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.944313] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b308f3d-f836-4b1b-bd9d-5e873595d2ce {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.985044] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4be65fa5-7deb-40f1-8ba6-2ba42b016797 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.994961] env[63379]: DEBUG oslo_vmware.api 
[None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779897, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.576101} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1733.997478] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf/6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1733.997843] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1733.998239] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-213d80d6-c265-4170-a9ba-86f44d2e236f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.001955] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e8b838c-9125-486e-8702-02e9d9f32568 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.021918] env[63379]: DEBUG nova.compute.provider_tree [None req-bbbcce13-e930-473b-b06b-25608ee4ac40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1734.028022] env[63379]: DEBUG nova.network.neutron [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Successfully created port: cff629c0-a0d5-447f-8157-45a44ad90882 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1734.028022] env[63379]: DEBUG oslo_vmware.api [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Waiting for the task: (returnval){ [ 1734.028022] env[63379]: value = "task-1779900" [ 1734.028022] env[63379]: _type = "Task" [ 1734.028022] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1734.034513] env[63379]: DEBUG oslo_concurrency.lockutils [req-d406a576-1d48-4722-b0db-bb880dcc1281 req-275cbe37-0b9b-4bd6-a2f0-0790e9c62be9 service nova] Releasing lock "refresh_cache-c900bb90-b4a8-40a2-9436-5a0ced1dd919" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1734.038758] env[63379]: DEBUG oslo_vmware.api [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779900, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.063137] env[63379]: DEBUG nova.network.neutron [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1734.090855] env[63379]: DEBUG nova.compute.manager [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1734.113771] env[63379]: DEBUG oslo_vmware.api [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779899, 'name': PowerOffVM_Task, 'duration_secs': 0.293644} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1734.116299] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1734.117118] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49bd50b2-e120-465c-939e-4a6dafef9560 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.137442] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3de264a8-20cc-4ee5-ad1c-ad73a6a5f088 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.211859] env[63379]: DEBUG oslo_vmware.api [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52ef7877-832b-2697-e4ce-c18258c62c1f, 'name': SearchDatastore_Task, 'duration_secs': 0.018617} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1734.211859] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ad95785-8246-48cf-b5c7-b8c571afff20 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.217903] env[63379]: DEBUG oslo_vmware.api [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Waiting for the task: (returnval){ [ 1734.217903] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]520ade72-e919-41a6-7b03-3e6e2277d797" [ 1734.217903] env[63379]: _type = "Task" [ 1734.217903] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1734.227266] env[63379]: DEBUG oslo_vmware.api [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]520ade72-e919-41a6-7b03-3e6e2277d797, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.242250] env[63379]: DEBUG oslo_vmware.api [None req-f6d71d8b-c796-4625-b1b5-7004ece5879e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779898, 'name': ReconfigVM_Task, 'duration_secs': 0.824424} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1734.242250] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6d71d8b-c796-4625-b1b5-7004ece5879e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Reconfigured VM instance instance-0000003e to attach disk [datastore1] volume-9d889203-dc27-4007-a5c2-f62dd5709f2f/volume-9d889203-dc27-4007-a5c2-f62dd5709f2f.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1734.248111] env[63379]: DEBUG nova.network.neutron [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Updating instance_info_cache with network_info: [{"id": "04979812-3eaa-4eb6-9a6f-306c8544d56b", "address": "fa:16:3e:07:3b:cc", "network": {"id": "21aec006-8ff0-453c-a492-04a7c2a2a4a9", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-29133767-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "26e96d98928449efaf2999f78cd52fac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91c1da19-ab68-4127-bacd-accbaff19651", "external-id": "nsx-vlan-transportzone-319", "segmentation_id": 319, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tap04979812-3e", "ovs_interfaceid": "04979812-3eaa-4eb6-9a6f-306c8544d56b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1734.249731] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-97d63e44-6c66-4bf0-8ffd-872e8361b15f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.269819] env[63379]: DEBUG oslo_vmware.api [None req-f6d71d8b-c796-4625-b1b5-7004ece5879e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1734.269819] env[63379]: value = "task-1779901" [ 1734.269819] env[63379]: _type = "Task" [ 1734.269819] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1734.277186] env[63379]: DEBUG oslo_vmware.api [None req-f6d71d8b-c796-4625-b1b5-7004ece5879e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779901, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.384339] env[63379]: DEBUG nova.compute.manager [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1734.528831] env[63379]: DEBUG nova.scheduler.client.report [None req-bbbcce13-e930-473b-b06b-25608ee4ac40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1734.542074] env[63379]: DEBUG oslo_vmware.api [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779900, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06626} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1734.542360] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1734.543181] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e3e7f1d-1895-4961-9821-a4b40f08bb79 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.568186] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Reconfiguring VM instance instance-00000051 to attach disk [datastore1] 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf/6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1734.568722] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-979381aa-1c3d-4de6-9dbf-ea5e3a633b1c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.588408] env[63379]: DEBUG oslo_vmware.api [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Waiting for the task: (returnval){ [ 1734.588408] env[63379]: value = "task-1779902" [ 1734.588408] env[63379]: _type = "Task" [ 1734.588408] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1734.599330] env[63379]: DEBUG oslo_vmware.api [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779902, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.648881] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Creating Snapshot of the VM instance {{(pid=63379) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1734.649231] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-d8407eae-524a-4799-a62d-41c4391e1bdd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.658306] env[63379]: DEBUG oslo_vmware.api [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1734.658306] env[63379]: value = "task-1779903" [ 1734.658306] env[63379]: _type = "Task" [ 1734.658306] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1734.667295] env[63379]: DEBUG oslo_vmware.api [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779903, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.729557] env[63379]: DEBUG oslo_vmware.api [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]520ade72-e919-41a6-7b03-3e6e2277d797, 'name': SearchDatastore_Task, 'duration_secs': 0.009132} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1734.729844] env[63379]: DEBUG oslo_concurrency.lockutils [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1734.730138] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] c900bb90-b4a8-40a2-9436-5a0ced1dd919/c900bb90-b4a8-40a2-9436-5a0ced1dd919.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1734.730403] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-813b74d9-1a35-4ede-95fa-e2d3d095a0b1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.736738] env[63379]: DEBUG oslo_vmware.api [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Waiting for the task: (returnval){ [ 1734.736738] env[63379]: value = "task-1779904" [ 1734.736738] env[63379]: _type = "Task" [ 1734.736738] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1734.744466] env[63379]: DEBUG oslo_vmware.api [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779904, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.762571] env[63379]: DEBUG oslo_concurrency.lockutils [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Releasing lock "refresh_cache-b9bc2562-9475-400e-9cf9-646b8f4c8cf2" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1734.763350] env[63379]: DEBUG nova.compute.manager [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Instance network_info: |[{"id": "04979812-3eaa-4eb6-9a6f-306c8544d56b", "address": "fa:16:3e:07:3b:cc", "network": {"id": "21aec006-8ff0-453c-a492-04a7c2a2a4a9", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-29133767-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "26e96d98928449efaf2999f78cd52fac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91c1da19-ab68-4127-bacd-accbaff19651", "external-id": "nsx-vlan-transportzone-319", "segmentation_id": 319, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap04979812-3e", "ovs_interfaceid": "04979812-3eaa-4eb6-9a6f-306c8544d56b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1734.763483] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:07:3b:cc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '91c1da19-ab68-4127-bacd-accbaff19651', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '04979812-3eaa-4eb6-9a6f-306c8544d56b', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1734.771456] env[63379]: DEBUG oslo.service.loopingcall [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1734.771755] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1734.774841] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2689c235-2777-4891-835e-e4bfc2428eb3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.791496] env[63379]: DEBUG nova.compute.manager [req-88fc0f3c-dcd7-48d4-b3f5-2afa7f45b9b3 req-2614795e-468a-4d30-b915-1b7a542449fe service nova] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Received event network-vif-plugged-04979812-3eaa-4eb6-9a6f-306c8544d56b {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1734.791719] env[63379]: DEBUG oslo_concurrency.lockutils [req-88fc0f3c-dcd7-48d4-b3f5-2afa7f45b9b3 req-2614795e-468a-4d30-b915-1b7a542449fe service nova] Acquiring lock "b9bc2562-9475-400e-9cf9-646b8f4c8cf2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1734.791934] env[63379]: DEBUG oslo_concurrency.lockutils [req-88fc0f3c-dcd7-48d4-b3f5-2afa7f45b9b3 req-2614795e-468a-4d30-b915-1b7a542449fe service nova] Lock "b9bc2562-9475-400e-9cf9-646b8f4c8cf2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1734.792116] env[63379]: DEBUG oslo_concurrency.lockutils [req-88fc0f3c-dcd7-48d4-b3f5-2afa7f45b9b3 req-2614795e-468a-4d30-b915-1b7a542449fe service nova] Lock "b9bc2562-9475-400e-9cf9-646b8f4c8cf2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1734.792286] env[63379]: DEBUG nova.compute.manager [req-88fc0f3c-dcd7-48d4-b3f5-2afa7f45b9b3 req-2614795e-468a-4d30-b915-1b7a542449fe service nova] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] No waiting events found dispatching network-vif-plugged-04979812-3eaa-4eb6-9a6f-306c8544d56b {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1734.792464] env[63379]: WARNING nova.compute.manager [req-88fc0f3c-dcd7-48d4-b3f5-2afa7f45b9b3 req-2614795e-468a-4d30-b915-1b7a542449fe service nova] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Received unexpected event network-vif-plugged-04979812-3eaa-4eb6-9a6f-306c8544d56b for instance with vm_state building and task_state spawning. [ 1734.792640] env[63379]: DEBUG nova.compute.manager [req-88fc0f3c-dcd7-48d4-b3f5-2afa7f45b9b3 req-2614795e-468a-4d30-b915-1b7a542449fe service nova] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Received event network-changed-04979812-3eaa-4eb6-9a6f-306c8544d56b {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1734.792798] env[63379]: DEBUG nova.compute.manager [req-88fc0f3c-dcd7-48d4-b3f5-2afa7f45b9b3 req-2614795e-468a-4d30-b915-1b7a542449fe service nova] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Refreshing instance network info cache due to event network-changed-04979812-3eaa-4eb6-9a6f-306c8544d56b. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1734.792984] env[63379]: DEBUG oslo_concurrency.lockutils [req-88fc0f3c-dcd7-48d4-b3f5-2afa7f45b9b3 req-2614795e-468a-4d30-b915-1b7a542449fe service nova] Acquiring lock "refresh_cache-b9bc2562-9475-400e-9cf9-646b8f4c8cf2" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1734.793142] env[63379]: DEBUG oslo_concurrency.lockutils [req-88fc0f3c-dcd7-48d4-b3f5-2afa7f45b9b3 req-2614795e-468a-4d30-b915-1b7a542449fe service nova] Acquired lock "refresh_cache-b9bc2562-9475-400e-9cf9-646b8f4c8cf2" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1734.793297] env[63379]: DEBUG nova.network.neutron [req-88fc0f3c-dcd7-48d4-b3f5-2afa7f45b9b3 req-2614795e-468a-4d30-b915-1b7a542449fe service nova] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Refreshing network info cache for port 04979812-3eaa-4eb6-9a6f-306c8544d56b {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1734.800766] env[63379]: DEBUG oslo_vmware.api [None req-f6d71d8b-c796-4625-b1b5-7004ece5879e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779901, 'name': ReconfigVM_Task, 'duration_secs': 0.1475} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1734.802685] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6d71d8b-c796-4625-b1b5-7004ece5879e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369441', 'volume_id': '9d889203-dc27-4007-a5c2-f62dd5709f2f', 'name': 'volume-9d889203-dc27-4007-a5c2-f62dd5709f2f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '19a41941-0679-4971-8a44-c95b13f5c294', 'attached_at': '', 'detached_at': '', 'volume_id': '9d889203-dc27-4007-a5c2-f62dd5709f2f', 'serial': '9d889203-dc27-4007-a5c2-f62dd5709f2f'} {{(pid=63379) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1734.804345] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1734.804345] env[63379]: value = "task-1779905" [ 1734.804345] env[63379]: _type = "Task" [ 1734.804345] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1734.815620] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779905, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.909140] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1734.966743] env[63379]: DEBUG oslo_concurrency.lockutils [None req-22500bcc-cce7-4516-8b3c-1547e93a4064 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Acquiring lock "1d76a28f-822d-4b4f-be2f-2ad3371b3979" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1734.967121] env[63379]: DEBUG oslo_concurrency.lockutils [None req-22500bcc-cce7-4516-8b3c-1547e93a4064 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lock "1d76a28f-822d-4b4f-be2f-2ad3371b3979" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1735.034504] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bbbcce13-e930-473b-b06b-25608ee4ac40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.952s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1735.036622] env[63379]: DEBUG oslo_concurrency.lockutils [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 14.244s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1735.036822] env[63379]: DEBUG nova.objects.instance [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63379) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1735.058843] env[63379]: INFO nova.scheduler.client.report [None req-bbbcce13-e930-473b-b06b-25608ee4ac40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Deleted allocations for instance 5c4ae6c6-538a-4724-ad77-340d9c60c24a [ 1735.101583] env[63379]: DEBUG nova.compute.manager [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1735.112626] env[63379]: DEBUG oslo_vmware.api [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779902, 'name': ReconfigVM_Task, 'duration_secs': 0.275166} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1735.113080] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Reconfigured VM instance instance-00000051 to attach disk [datastore1] 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf/6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1735.114569] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8d6fbbc3-75ad-41f2-a29f-e49f6408864f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.127026] env[63379]: DEBUG oslo_vmware.api [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Waiting for the task: (returnval){ [ 1735.127026] env[63379]: value = "task-1779906" [ 1735.127026] env[63379]: _type = "Task" [ 1735.127026] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1735.137259] env[63379]: DEBUG nova.virt.hardware [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1735.137563] env[63379]: DEBUG nova.virt.hardware [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1735.137880] env[63379]: DEBUG nova.virt.hardware [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1735.137918] env[63379]: DEBUG 
nova.virt.hardware [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1735.138166] env[63379]: DEBUG nova.virt.hardware [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1735.138374] env[63379]: DEBUG nova.virt.hardware [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1735.138632] env[63379]: DEBUG nova.virt.hardware [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1735.138812] env[63379]: DEBUG nova.virt.hardware [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1735.139012] env[63379]: DEBUG nova.virt.hardware [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1735.139220] env[63379]: DEBUG nova.virt.hardware [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1735.139405] env[63379]: DEBUG nova.virt.hardware [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1735.140827] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb520234-8fff-481c-bc12-c25af239c1c7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.149917] env[63379]: DEBUG oslo_vmware.api [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779906, 'name': Rename_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.154501] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a290d64-ca87-4513-b86a-2959a003a611 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.179559] env[63379]: DEBUG oslo_vmware.api [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779903, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.246762] env[63379]: DEBUG oslo_vmware.api [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779904, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.490485} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1735.247078] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] c900bb90-b4a8-40a2-9436-5a0ced1dd919/c900bb90-b4a8-40a2-9436-5a0ced1dd919.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1735.247304] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1735.247615] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-abec6655-2443-4922-bb35-44c296186351 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.256403] env[63379]: DEBUG oslo_vmware.api [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Waiting for the task: (returnval){ [ 1735.256403] env[63379]: value = "task-1779907" [ 1735.256403] env[63379]: _type = "Task" [ 1735.256403] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1735.265073] env[63379]: DEBUG oslo_vmware.api [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779907, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.319655] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779905, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.472057] env[63379]: DEBUG nova.compute.utils [None req-22500bcc-cce7-4516-8b3c-1547e93a4064 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1735.562689] env[63379]: DEBUG nova.network.neutron [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Successfully updated port: cff629c0-a0d5-447f-8157-45a44ad90882 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1735.571381] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bbbcce13-e930-473b-b06b-25608ee4ac40 tempest-ServerRescueTestJSON-871022900 tempest-ServerRescueTestJSON-871022900-project-member] Lock "5c4ae6c6-538a-4724-ad77-340d9c60c24a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.231s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1735.639646] env[63379]: DEBUG oslo_vmware.api [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779906, 'name': Rename_Task, 'duration_secs': 0.176724} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1735.639936] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1735.640207] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-79caf009-4eb9-476f-9ad6-e9d8d390db54 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.646810] env[63379]: DEBUG oslo_vmware.api [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Waiting for the task: (returnval){ [ 1735.646810] env[63379]: value = "task-1779908" [ 1735.646810] env[63379]: _type = "Task" [ 1735.646810] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1735.654835] env[63379]: DEBUG oslo_vmware.api [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779908, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.669730] env[63379]: DEBUG oslo_vmware.api [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779903, 'name': CreateSnapshot_Task, 'duration_secs': 0.730015} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1735.669985] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Created Snapshot of the VM instance {{(pid=63379) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1735.670735] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-586ef362-c769-4b1a-9a52-466d4136f65f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.766563] env[63379]: DEBUG oslo_vmware.api [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779907, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070856} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1735.766873] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1735.767699] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38c7548a-8eaf-4cc9-87e5-49696e46ce63 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.790028] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Reconfiguring VM instance instance-00000052 to attach disk [datastore1] c900bb90-b4a8-40a2-9436-5a0ced1dd919/c900bb90-b4a8-40a2-9436-5a0ced1dd919.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1735.790305] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3ed9af6d-9564-4136-9a93-90488877dba0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.809491] env[63379]: DEBUG oslo_vmware.api [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Waiting for the task: (returnval){ [ 1735.809491] env[63379]: value = "task-1779909" [ 1735.809491] env[63379]: _type = "Task" [ 1735.809491] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1735.820407] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779905, 'name': CreateVM_Task, 'duration_secs': 0.556721} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1735.823314] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1735.823616] env[63379]: DEBUG oslo_vmware.api [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779909, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.824282] env[63379]: DEBUG oslo_concurrency.lockutils [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1735.824483] env[63379]: DEBUG oslo_concurrency.lockutils [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1735.824821] env[63379]: DEBUG oslo_concurrency.lockutils [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1735.825088] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f7b6fb0-b025-40f4-b775-c5c032185a7e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.829270] env[63379]: DEBUG oslo_vmware.api [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Waiting for the task: (returnval){ [ 1735.829270] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5271dcc7-e43a-ead3-31d3-acb093031290" [ 1735.829270] env[63379]: _type = "Task" [ 1735.829270] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1735.836742] env[63379]: DEBUG oslo_vmware.api [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5271dcc7-e43a-ead3-31d3-acb093031290, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.849242] env[63379]: DEBUG nova.objects.instance [None req-f6d71d8b-c796-4625-b1b5-7004ece5879e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Lazy-loading 'flavor' on Instance uuid 19a41941-0679-4971-8a44-c95b13f5c294 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1735.878776] env[63379]: DEBUG nova.network.neutron [req-88fc0f3c-dcd7-48d4-b3f5-2afa7f45b9b3 req-2614795e-468a-4d30-b915-1b7a542449fe service nova] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Updated VIF entry in instance network info cache for port 04979812-3eaa-4eb6-9a6f-306c8544d56b. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1735.879200] env[63379]: DEBUG nova.network.neutron [req-88fc0f3c-dcd7-48d4-b3f5-2afa7f45b9b3 req-2614795e-468a-4d30-b915-1b7a542449fe service nova] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Updating instance_info_cache with network_info: [{"id": "04979812-3eaa-4eb6-9a6f-306c8544d56b", "address": "fa:16:3e:07:3b:cc", "network": {"id": "21aec006-8ff0-453c-a492-04a7c2a2a4a9", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-29133767-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "26e96d98928449efaf2999f78cd52fac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91c1da19-ab68-4127-bacd-accbaff19651", "external-id": "nsx-vlan-transportzone-319", "segmentation_id": 319, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap04979812-3e", "ovs_interfaceid": "04979812-3eaa-4eb6-9a6f-306c8544d56b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1735.975508] env[63379]: DEBUG oslo_concurrency.lockutils [None req-22500bcc-cce7-4516-8b3c-1547e93a4064 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lock "1d76a28f-822d-4b4f-be2f-2ad3371b3979" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1736.045887] env[63379]: DEBUG oslo_concurrency.lockutils [None req-58788ce1-dc82-4fbe-aebf-7a731236fd70 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1736.047164] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.575s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1736.049322] env[63379]: INFO nova.compute.claims [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1736.065320] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Acquiring lock "refresh_cache-8877e0f7-091b-4a91-bb5c-fb7733e5f70c" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1736.065482] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Acquired lock "refresh_cache-8877e0f7-091b-4a91-bb5c-fb7733e5f70c" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1736.067545] env[63379]: DEBUG nova.network.neutron [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1736.157691] env[63379]: DEBUG oslo_vmware.api [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779908, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.188514] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Creating linked-clone VM from snapshot {{(pid=63379) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1736.188877] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-96f31383-8f36-49a4-be69-69ace1b9e125 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.196839] env[63379]: DEBUG oslo_vmware.api [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1736.196839] env[63379]: value = "task-1779910" [ 1736.196839] env[63379]: _type = "Task" [ 1736.196839] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1736.205317] env[63379]: DEBUG oslo_vmware.api [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779910, 'name': CloneVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.326094] env[63379]: DEBUG oslo_vmware.api [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779909, 'name': ReconfigVM_Task, 'duration_secs': 0.289612} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1736.326405] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Reconfigured VM instance instance-00000052 to attach disk [datastore1] c900bb90-b4a8-40a2-9436-5a0ced1dd919/c900bb90-b4a8-40a2-9436-5a0ced1dd919.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1736.327025] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4935d857-e2b3-4fea-ba59-2544fdae889e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.334308] env[63379]: DEBUG oslo_vmware.api [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Waiting for the task: (returnval){ [ 1736.334308] env[63379]: value = "task-1779911" [ 1736.334308] env[63379]: _type = "Task" [ 1736.334308] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1736.341152] env[63379]: DEBUG oslo_vmware.api [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5271dcc7-e43a-ead3-31d3-acb093031290, 'name': SearchDatastore_Task, 'duration_secs': 0.021774} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1736.342524] env[63379]: DEBUG oslo_concurrency.lockutils [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1736.342524] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1736.342524] env[63379]: DEBUG oslo_concurrency.lockutils [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1736.342524] env[63379]: DEBUG oslo_concurrency.lockutils [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1736.343691] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1736.343691] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-37e5a5e7-202c-4a0a-a730-93e74057b1fa {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.350419] env[63379]: DEBUG oslo_vmware.api [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779911, 'name': Rename_Task} progress is 6%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.354333] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f6d71d8b-c796-4625-b1b5-7004ece5879e tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Lock "19a41941-0679-4971-8a44-c95b13f5c294" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.300s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1736.380309] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1736.380533] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1736.381578] env[63379]: DEBUG oslo_concurrency.lockutils [req-88fc0f3c-dcd7-48d4-b3f5-2afa7f45b9b3 req-2614795e-468a-4d30-b915-1b7a542449fe service nova] Releasing lock "refresh_cache-b9bc2562-9475-400e-9cf9-646b8f4c8cf2" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1736.381938] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6f08445-86db-487e-b35e-ce48987d05e1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.387932] env[63379]: DEBUG oslo_vmware.api [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Waiting for the task: (returnval){ [ 1736.387932] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]526b7b6a-4421-97c4-509f-a0b132efea45" [ 1736.387932] env[63379]: _type = "Task" [ 1736.387932] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1736.396804] env[63379]: DEBUG oslo_vmware.api [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]526b7b6a-4421-97c4-509f-a0b132efea45, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.633903] env[63379]: DEBUG nova.network.neutron [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Instance cache missing network info. 
{{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1736.664608] env[63379]: DEBUG oslo_vmware.api [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779908, 'name': PowerOnVM_Task, 'duration_secs': 0.519508} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1736.665381] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1736.665683] env[63379]: INFO nova.compute.manager [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Took 8.81 seconds to spawn the instance on the hypervisor. [ 1736.666054] env[63379]: DEBUG nova.compute.manager [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1736.667037] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2f5b17a-a238-4a98-9b58-5729eb5f1796 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.709494] env[63379]: DEBUG oslo_vmware.api [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779910, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.712184] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1736.712694] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1736.796129] env[63379]: INFO nova.compute.manager [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Rescuing [ 1736.796423] env[63379]: DEBUG oslo_concurrency.lockutils [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquiring lock "refresh_cache-19a41941-0679-4971-8a44-c95b13f5c294" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1736.796889] env[63379]: DEBUG oslo_concurrency.lockutils [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquired lock "refresh_cache-19a41941-0679-4971-8a44-c95b13f5c294" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1736.797126] env[63379]: DEBUG nova.network.neutron [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1736.800263] env[63379]: DEBUG nova.network.neutron [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Updating instance_info_cache with network_info: [{"id": "cff629c0-a0d5-447f-8157-45a44ad90882", "address": "fa:16:3e:18:6c:f6", "network": {"id": "21aec006-8ff0-453c-a492-04a7c2a2a4a9", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-29133767-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "26e96d98928449efaf2999f78cd52fac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91c1da19-ab68-4127-bacd-accbaff19651", "external-id": "nsx-vlan-transportzone-319", "segmentation_id": 319, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcff629c0-a0", "ovs_interfaceid": "cff629c0-a0d5-447f-8157-45a44ad90882", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": 
{}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1736.817406] env[63379]: DEBUG nova.compute.manager [req-c3aea83c-0251-4ad8-9f6f-85926cd87afd req-7cbe3c82-08c7-4ff3-9127-8ac54b1ce93a service nova] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Received event network-vif-plugged-cff629c0-a0d5-447f-8157-45a44ad90882 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1736.817647] env[63379]: DEBUG oslo_concurrency.lockutils [req-c3aea83c-0251-4ad8-9f6f-85926cd87afd req-7cbe3c82-08c7-4ff3-9127-8ac54b1ce93a service nova] Acquiring lock "8877e0f7-091b-4a91-bb5c-fb7733e5f70c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1736.817900] env[63379]: DEBUG oslo_concurrency.lockutils [req-c3aea83c-0251-4ad8-9f6f-85926cd87afd req-7cbe3c82-08c7-4ff3-9127-8ac54b1ce93a service nova] Lock "8877e0f7-091b-4a91-bb5c-fb7733e5f70c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1736.818097] env[63379]: DEBUG oslo_concurrency.lockutils [req-c3aea83c-0251-4ad8-9f6f-85926cd87afd req-7cbe3c82-08c7-4ff3-9127-8ac54b1ce93a service nova] Lock "8877e0f7-091b-4a91-bb5c-fb7733e5f70c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1736.818277] env[63379]: DEBUG nova.compute.manager [req-c3aea83c-0251-4ad8-9f6f-85926cd87afd req-7cbe3c82-08c7-4ff3-9127-8ac54b1ce93a service nova] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] No waiting events found dispatching network-vif-plugged-cff629c0-a0d5-447f-8157-45a44ad90882 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1736.818447] env[63379]: WARNING nova.compute.manager [req-c3aea83c-0251-4ad8-9f6f-85926cd87afd req-7cbe3c82-08c7-4ff3-9127-8ac54b1ce93a service nova] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Received unexpected event network-vif-plugged-cff629c0-a0d5-447f-8157-45a44ad90882 for instance with vm_state building and task_state spawning. [ 1736.818615] env[63379]: DEBUG nova.compute.manager [req-c3aea83c-0251-4ad8-9f6f-85926cd87afd req-7cbe3c82-08c7-4ff3-9127-8ac54b1ce93a service nova] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Received event network-changed-cff629c0-a0d5-447f-8157-45a44ad90882 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1736.818786] env[63379]: DEBUG nova.compute.manager [req-c3aea83c-0251-4ad8-9f6f-85926cd87afd req-7cbe3c82-08c7-4ff3-9127-8ac54b1ce93a service nova] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Refreshing instance network info cache due to event network-changed-cff629c0-a0d5-447f-8157-45a44ad90882. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1736.818971] env[63379]: DEBUG oslo_concurrency.lockutils [req-c3aea83c-0251-4ad8-9f6f-85926cd87afd req-7cbe3c82-08c7-4ff3-9127-8ac54b1ce93a service nova] Acquiring lock "refresh_cache-8877e0f7-091b-4a91-bb5c-fb7733e5f70c" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1736.844155] env[63379]: DEBUG oslo_vmware.api [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779911, 'name': Rename_Task, 'duration_secs': 0.398052} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1736.844436] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1736.844719] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5e9402c1-60f2-498f-904d-8eb768b5e7ae {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.851818] env[63379]: DEBUG oslo_vmware.api [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Waiting for the task: (returnval){ [ 1736.851818] env[63379]: value = "task-1779912" [ 1736.851818] env[63379]: _type = "Task" [ 1736.851818] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1736.859255] env[63379]: DEBUG oslo_vmware.api [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779912, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.900715] env[63379]: DEBUG oslo_vmware.api [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]526b7b6a-4421-97c4-509f-a0b132efea45, 'name': SearchDatastore_Task, 'duration_secs': 0.018725} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1736.901644] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52f18c72-8030-4ccc-8117-b5c39de5006d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.906970] env[63379]: DEBUG oslo_vmware.api [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Waiting for the task: (returnval){ [ 1736.906970] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52af0a07-3acd-689a-0779-b0b5fdcde847" [ 1736.906970] env[63379]: _type = "Task" [ 1736.906970] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1736.914719] env[63379]: DEBUG oslo_vmware.api [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52af0a07-3acd-689a-0779-b0b5fdcde847, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.065276] env[63379]: DEBUG oslo_concurrency.lockutils [None req-22500bcc-cce7-4516-8b3c-1547e93a4064 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Acquiring lock "1d76a28f-822d-4b4f-be2f-2ad3371b3979" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1737.065447] env[63379]: DEBUG oslo_concurrency.lockutils [None req-22500bcc-cce7-4516-8b3c-1547e93a4064 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lock "1d76a28f-822d-4b4f-be2f-2ad3371b3979" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1737.065704] env[63379]: INFO nova.compute.manager [None req-22500bcc-cce7-4516-8b3c-1547e93a4064 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Attaching volume 3d2e44ab-54ce-4ed3-b05f-eda61e23e1ef to /dev/sdb [ 1737.108058] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fda7719-6f9d-45ed-a6de-233447deacb3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.117794] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-528bf973-6b3c-488f-83e6-b127c680565a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.132236] env[63379]: DEBUG nova.virt.block_device [None req-22500bcc-cce7-4516-8b3c-1547e93a4064 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Updating existing volume attachment record: 8ea7e946-aa8d-49f6-a948-3c15a2e0084a {{(pid=63379) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1737.190273] env[63379]: INFO nova.compute.manager [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Took 23.93 seconds to build instance. [ 1737.211841] env[63379]: DEBUG oslo_vmware.api [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779910, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.222960] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1737.223151] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Starting heal instance info cache {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9974}} [ 1737.306021] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Releasing lock "refresh_cache-8877e0f7-091b-4a91-bb5c-fb7733e5f70c" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1737.306021] env[63379]: DEBUG nova.compute.manager [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Instance network_info: |[{"id": "cff629c0-a0d5-447f-8157-45a44ad90882", "address": "fa:16:3e:18:6c:f6", "network": {"id": "21aec006-8ff0-453c-a492-04a7c2a2a4a9", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-29133767-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "26e96d98928449efaf2999f78cd52fac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91c1da19-ab68-4127-bacd-accbaff19651", "external-id": "nsx-vlan-transportzone-319", "segmentation_id": 319, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcff629c0-a0", "ovs_interfaceid": "cff629c0-a0d5-447f-8157-45a44ad90882", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1737.306374] env[63379]: DEBUG oslo_concurrency.lockutils [req-c3aea83c-0251-4ad8-9f6f-85926cd87afd req-7cbe3c82-08c7-4ff3-9127-8ac54b1ce93a service nova] Acquired lock "refresh_cache-8877e0f7-091b-4a91-bb5c-fb7733e5f70c" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1737.306756] env[63379]: DEBUG nova.network.neutron [req-c3aea83c-0251-4ad8-9f6f-85926cd87afd req-7cbe3c82-08c7-4ff3-9127-8ac54b1ce93a service nova] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Refreshing network info cache for port cff629c0-a0d5-447f-8157-45a44ad90882 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1737.308404] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Instance VIF info [{'network_name': 
'br-int', 'mac_address': 'fa:16:3e:18:6c:f6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '91c1da19-ab68-4127-bacd-accbaff19651', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cff629c0-a0d5-447f-8157-45a44ad90882', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1737.316876] env[63379]: DEBUG oslo.service.loopingcall [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1737.320839] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1737.321655] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3488b929-22f9-4894-a602-9efc93b1ed0a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.349335] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1737.349335] env[63379]: value = "task-1779914" [ 1737.349335] env[63379]: _type = "Task" [ 1737.349335] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1737.366244] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779914, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.370369] env[63379]: DEBUG oslo_vmware.api [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779912, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.385953] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd3fce2d-c477-4c62-b773-914e434877bc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.395554] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1b4d0df-33cc-4b14-9f64-a5c55ac34cec {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.440929] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01320165-7b43-4a0a-b46e-96c9b9795bc3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.454904] env[63379]: DEBUG oslo_vmware.api [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52af0a07-3acd-689a-0779-b0b5fdcde847, 'name': SearchDatastore_Task, 'duration_secs': 0.009184} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1737.456462] env[63379]: DEBUG oslo_concurrency.lockutils [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1737.456785] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] b9bc2562-9475-400e-9cf9-646b8f4c8cf2/b9bc2562-9475-400e-9cf9-646b8f4c8cf2.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1737.457140] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ddbfb27b-e504-43b2-81ae-cb96e6848da5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.460338] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f956b5ea-b0f0-472a-9702-ab8dd10c4acd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.475462] env[63379]: DEBUG nova.compute.provider_tree [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1737.478858] env[63379]: DEBUG oslo_vmware.api [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Waiting for the task: (returnval){ [ 1737.478858] env[63379]: value = "task-1779915" [ 1737.478858] env[63379]: _type = "Task" [ 1737.478858] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1737.489542] env[63379]: DEBUG oslo_vmware.api [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779915, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.671181] env[63379]: DEBUG nova.network.neutron [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Updating instance_info_cache with network_info: [{"id": "2d279162-72d1-4378-b83d-c80b2815f680", "address": "fa:16:3e:8c:45:f4", "network": {"id": "3a5c4f8e-5c7c-4623-90f8-f1b83e5b35f8", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-709139332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce15a519ec5744feb0731439b2534fc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d279162-72", "ovs_interfaceid": "2d279162-72d1-4378-b83d-c80b2815f680", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1737.693511] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a370e629-cb16-4805-a24e-0fe25aac888f tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Lock "6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.435s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1737.713630] env[63379]: DEBUG oslo_vmware.api [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779910, 'name': CloneVM_Task, 'duration_secs': 1.482871} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1737.713630] env[63379]: INFO nova.virt.vmwareapi.vmops [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Created linked-clone VM from snapshot [ 1737.713630] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50fe94fa-8619-4c8d-bdc0-8637f259ca1e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.723448] env[63379]: DEBUG nova.virt.vmwareapi.images [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Uploading image efafc8d3-6d68-4162-8ee3-9d41c7ec3367 {{(pid=63379) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1737.757351] env[63379]: DEBUG oslo_vmware.rw_handles [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1737.757351] env[63379]: value = "vm-369448" [ 1737.757351] env[63379]: _type = "VirtualMachine" [ 1737.757351] env[63379]: }. {{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1737.757692] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-273143c0-4040-4081-97d4-31b94cece5b0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.767301] env[63379]: DEBUG oslo_vmware.rw_handles [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lease: (returnval){ [ 1737.767301] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5216a332-d943-404d-eee0-45e889ed6eb0" [ 1737.767301] env[63379]: _type = "HttpNfcLease" [ 1737.767301] env[63379]: } obtained for exporting VM: (result){ [ 1737.767301] env[63379]: value = "vm-369448" [ 1737.767301] env[63379]: _type = "VirtualMachine" [ 1737.767301] env[63379]: }. {{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1737.767766] env[63379]: DEBUG oslo_vmware.api [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the lease: (returnval){ [ 1737.767766] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5216a332-d943-404d-eee0-45e889ed6eb0" [ 1737.767766] env[63379]: _type = "HttpNfcLease" [ 1737.767766] env[63379]: } to be ready. {{(pid=63379) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1737.777441] env[63379]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1737.777441] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5216a332-d943-404d-eee0-45e889ed6eb0" [ 1737.777441] env[63379]: _type = "HttpNfcLease" [ 1737.777441] env[63379]: } is initializing. 
{{(pid=63379) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1737.863178] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779914, 'name': CreateVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.869720] env[63379]: DEBUG oslo_vmware.api [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779912, 'name': PowerOnVM_Task, 'duration_secs': 0.640735} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1737.870087] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1737.870378] env[63379]: INFO nova.compute.manager [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Took 7.58 seconds to spawn the instance on the hypervisor. [ 1737.870635] env[63379]: DEBUG nova.compute.manager [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1737.871553] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e255ce05-7364-411a-ad29-8247dd52f3af {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.948560] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cad26875-63ff-4364-a68e-2cdd2ceb19dd tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Acquiring lock "6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1737.948855] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cad26875-63ff-4364-a68e-2cdd2ceb19dd tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Lock "6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1737.949100] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cad26875-63ff-4364-a68e-2cdd2ceb19dd tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Acquiring lock "6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1737.949297] env[63379]: DEBUG oslo_concurrency.lockutils [None 
req-cad26875-63ff-4364-a68e-2cdd2ceb19dd tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Lock "6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1737.949469] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cad26875-63ff-4364-a68e-2cdd2ceb19dd tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Lock "6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1737.951670] env[63379]: INFO nova.compute.manager [None req-cad26875-63ff-4364-a68e-2cdd2ceb19dd tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Terminating instance [ 1737.953693] env[63379]: DEBUG nova.compute.manager [None req-cad26875-63ff-4364-a68e-2cdd2ceb19dd tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1737.953973] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cad26875-63ff-4364-a68e-2cdd2ceb19dd tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1737.954946] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7ef5367-b2b9-412e-837a-be7b0971ce62 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.963578] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-cad26875-63ff-4364-a68e-2cdd2ceb19dd tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1737.963941] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a2690751-62f9-4957-b6d0-1246d4a18ff4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.971260] env[63379]: DEBUG oslo_vmware.api [None req-cad26875-63ff-4364-a68e-2cdd2ceb19dd tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Waiting for the task: (returnval){ [ 1737.971260] env[63379]: value = "task-1779919" [ 1737.971260] env[63379]: _type = "Task" [ 1737.971260] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1737.993911] env[63379]: DEBUG oslo_vmware.api [None req-cad26875-63ff-4364-a68e-2cdd2ceb19dd tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779919, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.001311] env[63379]: DEBUG oslo_vmware.api [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779915, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.006608] env[63379]: ERROR nova.scheduler.client.report [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [req-01d04814-3d82-4ee0-9980-d52534c07b19] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID cf478c89-515f-4372-b90f-4868ab56e978. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-01d04814-3d82-4ee0-9980-d52534c07b19"}]} [ 1738.026104] env[63379]: DEBUG nova.scheduler.client.report [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Refreshing inventories for resource provider cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1738.042388] env[63379]: DEBUG nova.scheduler.client.report [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Updating ProviderTree inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1738.042706] env[63379]: DEBUG nova.compute.provider_tree [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 
1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1738.057652] env[63379]: DEBUG nova.scheduler.client.report [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Refreshing aggregate associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, aggregates: None {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1738.075219] env[63379]: DEBUG nova.network.neutron [req-c3aea83c-0251-4ad8-9f6f-85926cd87afd req-7cbe3c82-08c7-4ff3-9127-8ac54b1ce93a service nova] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Updated VIF entry in instance network info cache for port cff629c0-a0d5-447f-8157-45a44ad90882. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1738.075597] env[63379]: DEBUG nova.network.neutron [req-c3aea83c-0251-4ad8-9f6f-85926cd87afd req-7cbe3c82-08c7-4ff3-9127-8ac54b1ce93a service nova] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Updating instance_info_cache with network_info: [{"id": "cff629c0-a0d5-447f-8157-45a44ad90882", "address": "fa:16:3e:18:6c:f6", "network": {"id": "21aec006-8ff0-453c-a492-04a7c2a2a4a9", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-29133767-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "26e96d98928449efaf2999f78cd52fac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91c1da19-ab68-4127-bacd-accbaff19651", "external-id": "nsx-vlan-transportzone-319", "segmentation_id": 319, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcff629c0-a0", "ovs_interfaceid": "cff629c0-a0d5-447f-8157-45a44ad90882", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1738.077614] env[63379]: DEBUG nova.scheduler.client.report [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Refreshing trait associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1738.174851] env[63379]: DEBUG oslo_concurrency.lockutils [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Releasing lock "refresh_cache-19a41941-0679-4971-8a44-c95b13f5c294" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1738.276812] env[63379]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1738.276812] env[63379]: 
value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5216a332-d943-404d-eee0-45e889ed6eb0" [ 1738.276812] env[63379]: _type = "HttpNfcLease" [ 1738.276812] env[63379]: } is ready. {{(pid=63379) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1738.277218] env[63379]: DEBUG oslo_vmware.rw_handles [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1738.277218] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5216a332-d943-404d-eee0-45e889ed6eb0" [ 1738.277218] env[63379]: _type = "HttpNfcLease" [ 1738.277218] env[63379]: }. {{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1738.277939] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6670455d-386f-4ad5-8ff4-f258cfdb1f0e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.288014] env[63379]: DEBUG oslo_vmware.rw_handles [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525b72c3-394c-439c-38d9-a03f0920dcce/disk-0.vmdk from lease info. {{(pid=63379) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1738.288211] env[63379]: DEBUG oslo_vmware.rw_handles [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525b72c3-394c-439c-38d9-a03f0920dcce/disk-0.vmdk for reading. {{(pid=63379) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1738.359430] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779914, 'name': CreateVM_Task, 'duration_secs': 0.557572} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1738.359597] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1738.360233] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1738.360448] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1738.360743] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1738.363542] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c588a15f-2060-4386-bd75-ba4b63eca283 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.368743] env[63379]: DEBUG oslo_vmware.api [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Waiting for the task: (returnval){ [ 1738.368743] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52bce4fe-c391-eed9-16e6-42cd2f35ed2a" [ 1738.368743] env[63379]: _type = "Task" [ 1738.368743] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.377170] env[63379]: DEBUG oslo_vmware.api [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52bce4fe-c391-eed9-16e6-42cd2f35ed2a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.390907] env[63379]: INFO nova.compute.manager [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Took 24.38 seconds to build instance. 
[ 1738.403845] env[63379]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f2463af7-c5b3-43db-90ee-a8486081c35e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.421715] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c970bb68-834d-4ce9-9475-135796ee3714 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.431851] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ce67106-d3c6-479d-8944-8e09cef7e71a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.471882] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f5fd0e6-650f-4822-ae0f-8340de2610f9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.487421] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48712aa2-93e5-49de-b3b8-ff9ebc7b7747 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.489266] env[63379]: DEBUG oslo_vmware.api [None req-cad26875-63ff-4364-a68e-2cdd2ceb19dd tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779919, 'name': PowerOffVM_Task, 'duration_secs': 0.204853} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1738.491723] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-cad26875-63ff-4364-a68e-2cdd2ceb19dd tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1738.491902] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cad26875-63ff-4364-a68e-2cdd2ceb19dd tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1738.492505] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dfe708c3-2d7d-4f73-b84a-cbe91af6e625 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.502175] env[63379]: DEBUG nova.compute.provider_tree [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1738.506083] env[63379]: DEBUG oslo_vmware.api [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779915, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.540818} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1738.506540] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] b9bc2562-9475-400e-9cf9-646b8f4c8cf2/b9bc2562-9475-400e-9cf9-646b8f4c8cf2.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1738.506792] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1738.507038] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bb968bfd-99eb-4618-bdb6-a8811b5443e9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.515020] env[63379]: DEBUG oslo_vmware.api [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Waiting for the task: (returnval){ [ 1738.515020] env[63379]: value = "task-1779921" [ 1738.515020] env[63379]: _type = "Task" [ 1738.515020] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.521425] env[63379]: DEBUG oslo_vmware.api [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779921, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.580855] env[63379]: DEBUG oslo_concurrency.lockutils [req-c3aea83c-0251-4ad8-9f6f-85926cd87afd req-7cbe3c82-08c7-4ff3-9127-8ac54b1ce93a service nova] Releasing lock "refresh_cache-8877e0f7-091b-4a91-bb5c-fb7733e5f70c" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1738.587879] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cad26875-63ff-4364-a68e-2cdd2ceb19dd tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1738.588125] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cad26875-63ff-4364-a68e-2cdd2ceb19dd tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1738.588438] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-cad26875-63ff-4364-a68e-2cdd2ceb19dd tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Deleting the datastore file [datastore1] 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1738.588799] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-72382562-943c-4e68-a22f-983d7e4c7f55 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.595313] env[63379]: DEBUG oslo_vmware.api [None req-cad26875-63ff-4364-a68e-2cdd2ceb19dd tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Waiting for the task: (returnval){ [ 1738.595313] env[63379]: value = "task-1779922" [ 1738.595313] env[63379]: _type = "Task" [ 1738.595313] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.603972] env[63379]: DEBUG oslo_vmware.api [None req-cad26875-63ff-4364-a68e-2cdd2ceb19dd tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779922, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.712839] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1738.713203] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-08254fbf-e916-4d57-bff3-adbbba64cd48 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.721367] env[63379]: DEBUG oslo_vmware.api [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1738.721367] env[63379]: value = "task-1779923" [ 1738.721367] env[63379]: _type = "Task" [ 1738.721367] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.730038] env[63379]: DEBUG oslo_vmware.api [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779923, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.881769] env[63379]: DEBUG oslo_vmware.api [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52bce4fe-c391-eed9-16e6-42cd2f35ed2a, 'name': SearchDatastore_Task, 'duration_secs': 0.012924} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1738.881769] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1738.881769] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1738.881769] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1738.882043] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1738.882079] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1738.882485] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d24d6834-178b-4893-9807-d3161a1a3e11 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.893027] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1738.893252] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Folder [datastore1] devstack-image-cache_base created. 
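Note how the cached image VMDK is guarded by a lock named after its datastore path: one request releases the lock on devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk while another acquires it before processing the same image. A stand-alone sketch of that named-lock idiom using plain threading (Nova itself relies on oslo_concurrency.lockutils; these names are illustrative):

import threading
from collections import defaultdict
from contextlib import contextmanager

_locks = defaultdict(threading.Lock)

@contextmanager
def named_lock(name):
    # Serialize work on a shared resource identified by a string name.
    lock = _locks[name]
    lock.acquire()
    print(f'Acquired lock "{name}"')
    try:
        yield
    finally:
        lock.release()
        print(f'Released lock "{name}"')

CACHE_VMDK = ('[datastore1] devstack-image-cache_base/'
              'd3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/'
              'd3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk')

with named_lock(CACHE_VMDK):
    # Only one request at a time may inspect or populate the cached image.
    pass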
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1738.894216] env[63379]: DEBUG oslo_concurrency.lockutils [None req-74297bba-f088-4707-88d3-dbb79d1c0fc2 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Lock "c900bb90-b4a8-40a2-9436-5a0ced1dd919" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.896s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1738.894763] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d71ce632-6edc-4eba-92b3-45f282ee61e7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.899911] env[63379]: DEBUG oslo_vmware.api [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Waiting for the task: (returnval){ [ 1738.899911] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52239c1e-441f-f2f0-6227-65ca14457bf1" [ 1738.899911] env[63379]: _type = "Task" [ 1738.899911] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.908020] env[63379]: DEBUG oslo_vmware.api [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52239c1e-441f-f2f0-6227-65ca14457bf1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.009060] env[63379]: DEBUG nova.scheduler.client.report [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1739.025684] env[63379]: DEBUG oslo_vmware.api [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779921, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.236607} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.026816] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1739.028134] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9600d0a-6f75-414f-b8de-c6830f9bb80f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.051625] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Reconfiguring VM instance instance-00000053 to attach disk [datastore1] b9bc2562-9475-400e-9cf9-646b8f4c8cf2/b9bc2562-9475-400e-9cf9-646b8f4c8cf2.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1739.051625] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5bd2e88f-81c3-4abc-8404-86caf8b4ac7a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.072530] env[63379]: DEBUG oslo_vmware.api [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Waiting for the task: (returnval){ [ 1739.072530] env[63379]: value = "task-1779924" [ 1739.072530] env[63379]: _type = "Task" [ 1739.072530] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.081428] env[63379]: DEBUG oslo_vmware.api [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779924, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.105354] env[63379]: DEBUG oslo_vmware.api [None req-cad26875-63ff-4364-a68e-2cdd2ceb19dd tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Task: {'id': task-1779922, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.402786} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.106486] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-cad26875-63ff-4364-a68e-2cdd2ceb19dd tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1739.106486] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cad26875-63ff-4364-a68e-2cdd2ceb19dd tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1739.106486] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cad26875-63ff-4364-a68e-2cdd2ceb19dd tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1739.106760] env[63379]: INFO nova.compute.manager [None req-cad26875-63ff-4364-a68e-2cdd2ceb19dd tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1739.107059] env[63379]: DEBUG oslo.service.loopingcall [None req-cad26875-63ff-4364-a68e-2cdd2ceb19dd tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1739.107336] env[63379]: DEBUG nova.compute.manager [-] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1739.107535] env[63379]: DEBUG nova.network.neutron [-] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1739.230880] env[63379]: DEBUG oslo_vmware.api [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779923, 'name': PowerOffVM_Task, 'duration_secs': 0.323184} completed successfully. 
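The teardown of instance 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf above follows a fixed order: unregister the VM, delete its datastore directory, mark the instance destroyed, then deallocate the Neutron ports inside a retrying looping call. A rough sketch of that ordering with stubbed-out steps (illustrative only, not the compute manager's code):

import time

def unregister_vm(instance):
    print(f"Unregistered the VM {instance}")

def delete_datastore_dir(datastore, instance):
    print(f"Deleted the contents of [{datastore}] {instance}")

def deallocate_network(instance):
    print(f"Deallocated network for instance {instance}")

def destroy_instance(instance, datastore='datastore1', retries=3, delay=0.1):
    unregister_vm(instance)                    # 1. remove the VM from vCenter
    delete_datastore_dir(datastore, instance)  # 2. delete its files on the datastore
    for attempt in range(1, retries + 1):      # 3. tear down networking, with retries
        try:
            deallocate_network(instance)
            break
        except Exception:
            if attempt == retries:
                raise
            time.sleep(delay)

destroy_instance('6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf')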
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.231323] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1739.232329] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fac1655-9ada-4f7f-954d-33e9db18229a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.257502] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbbbe137-cb84-482c-9785-364b82cf13a3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.290435] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1739.290899] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-27f11079-d8d4-488c-8b0b-34b05fa23781 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.297897] env[63379]: DEBUG oslo_vmware.api [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1739.297897] env[63379]: value = "task-1779925" [ 1739.297897] env[63379]: _type = "Task" [ 1739.297897] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.308573] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] VM already powered off {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1739.308849] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1739.309097] env[63379]: DEBUG oslo_concurrency.lockutils [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1739.413270] env[63379]: DEBUG oslo_vmware.api [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52239c1e-441f-f2f0-6227-65ca14457bf1, 'name': SearchDatastore_Task, 'duration_secs': 0.016423} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.414990] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73c8cdc3-4d06-494d-aade-ae3d0fda6a43 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.422096] env[63379]: DEBUG oslo_vmware.api [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Waiting for the task: (returnval){ [ 1739.422096] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5273c7a2-3738-209a-f5a9-6e86079c258f" [ 1739.422096] env[63379]: _type = "Task" [ 1739.422096] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.430308] env[63379]: DEBUG nova.compute.manager [req-c44a5ee6-cf86-423a-a053-cf6fe924e442 req-e6e6d7f0-76ad-4b59-a110-d0edf4033b6b service nova] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Received event network-vif-deleted-fec55523-e298-4c52-8e45-f7a01d691c42 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1739.430413] env[63379]: INFO nova.compute.manager [req-c44a5ee6-cf86-423a-a053-cf6fe924e442 req-e6e6d7f0-76ad-4b59-a110-d0edf4033b6b service nova] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Neutron deleted interface fec55523-e298-4c52-8e45-f7a01d691c42; detaching it from the instance and deleting it from the info cache [ 1739.430837] env[63379]: DEBUG nova.network.neutron [req-c44a5ee6-cf86-423a-a053-cf6fe924e442 req-e6e6d7f0-76ad-4b59-a110-d0edf4033b6b service nova] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1739.439102] env[63379]: DEBUG oslo_vmware.api [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5273c7a2-3738-209a-f5a9-6e86079c258f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.514019] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.467s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1739.514719] env[63379]: DEBUG nova.compute.manager [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1739.517492] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cd79a38d-e171-4897-b885-d0d33043bf5d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.681s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1739.517904] env[63379]: DEBUG nova.objects.instance [None req-cd79a38d-e171-4897-b885-d0d33043bf5d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lazy-loading 'resources' on Instance uuid fad7a2dd-291f-4105-95a6-56bdbcc7acb4 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1739.583440] env[63379]: DEBUG oslo_vmware.api [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779924, 'name': ReconfigVM_Task, 'duration_secs': 0.328435} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.583440] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Reconfigured VM instance instance-00000053 to attach disk [datastore1] b9bc2562-9475-400e-9cf9-646b8f4c8cf2/b9bc2562-9475-400e-9cf9-646b8f4c8cf2.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1739.585063] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7c23bcb8-82f1-4bac-b041-a6c9204159bb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.592029] env[63379]: DEBUG oslo_vmware.api [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Waiting for the task: (returnval){ [ 1739.592029] env[63379]: value = "task-1779926" [ 1739.592029] env[63379]: _type = "Task" [ 1739.592029] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.600220] env[63379]: DEBUG oslo_vmware.api [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779926, 'name': Rename_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.897086] env[63379]: DEBUG nova.network.neutron [-] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1739.934583] env[63379]: DEBUG oslo_vmware.api [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5273c7a2-3738-209a-f5a9-6e86079c258f, 'name': SearchDatastore_Task, 'duration_secs': 0.024162} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.934964] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-60378a23-8a67-4a84-8c17-20f0ef21cb1a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.937214] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1739.937488] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 8877e0f7-091b-4a91-bb5c-fb7733e5f70c/8877e0f7-091b-4a91-bb5c-fb7733e5f70c.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1739.937827] env[63379]: DEBUG oslo_concurrency.lockutils [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1739.938034] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1739.938257] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c214d6d1-1ae7-4e32-a7ad-c1a9389b6bc8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.940845] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c0503f49-72d2-46f7-8f7e-7dd6364035f5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.948687] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-093a3dd1-6af6-494c-9f5a-ecd7a8b141b2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.962221] env[63379]: DEBUG oslo_vmware.api [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Waiting for the task: (returnval){ [ 1739.962221] env[63379]: value = "task-1779928" [ 1739.962221] env[63379]: _type = "Task" [ 1739.962221] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.962482] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1739.962684] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1739.963914] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53dcc154-d157-44ce-b500-b8a073e96949 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.976510] env[63379]: DEBUG oslo_vmware.api [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779928, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.976912] env[63379]: DEBUG oslo_vmware.api [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1739.976912] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52631c50-758a-c162-2485-acfd91f3120d" [ 1739.976912] env[63379]: _type = "Task" [ 1739.976912] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.990025] env[63379]: DEBUG nova.compute.manager [req-c44a5ee6-cf86-423a-a053-cf6fe924e442 req-e6e6d7f0-76ad-4b59-a110-d0edf4033b6b service nova] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Detach interface failed, port_id=fec55523-e298-4c52-8e45-f7a01d691c42, reason: Instance 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 1740.000494] env[63379]: DEBUG oslo_vmware.api [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52631c50-758a-c162-2485-acfd91f3120d, 'name': SearchDatastore_Task, 'duration_secs': 0.010779} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.001488] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-57b57069-5d42-4c14-8dee-05c03894e2aa {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.008193] env[63379]: DEBUG oslo_vmware.api [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1740.008193] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]527765bf-508e-0f96-17a9-ccf6def8d670" [ 1740.008193] env[63379]: _type = "Task" [ 1740.008193] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1740.018826] env[63379]: DEBUG oslo_vmware.api [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]527765bf-508e-0f96-17a9-ccf6def8d670, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.021182] env[63379]: DEBUG nova.compute.utils [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1740.025523] env[63379]: DEBUG nova.compute.manager [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1740.027287] env[63379]: DEBUG nova.network.neutron [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1740.077913] env[63379]: DEBUG nova.policy [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1fd786092d394d1a9b444051664ac7ae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0f28f4532d464e6eb90ab75799990c85', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1740.105876] env[63379]: DEBUG oslo_vmware.api [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779926, 'name': Rename_Task, 'duration_secs': 0.174793} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.106359] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1740.106593] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8b40284c-f76a-4686-87ca-49580c41a20a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.114500] env[63379]: DEBUG oslo_vmware.api [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Waiting for the task: (returnval){ [ 1740.114500] env[63379]: value = "task-1779929" [ 1740.114500] env[63379]: _type = "Task" [ 1740.114500] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1740.123646] env[63379]: DEBUG oslo_vmware.api [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779929, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.303021] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c0c58cc-ac43-4308-93de-64eda8680f36 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.310215] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-932aa837-e05b-4bb7-bec3-082dc85079d1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.344975] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85e3a655-9572-47e9-8856-a37dc86f0ca6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.353605] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-521c54b1-f515-4fe3-b455-fc50a0afa199 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.367727] env[63379]: DEBUG nova.compute.provider_tree [None req-cd79a38d-e171-4897-b885-d0d33043bf5d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1740.381925] env[63379]: DEBUG nova.network.neutron [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Successfully created port: 82d4cc07-9772-4f7e-87ba-1ef653e88fd3 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1740.399818] env[63379]: INFO nova.compute.manager [-] [instance: 
6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Took 1.29 seconds to deallocate network for instance. [ 1740.478650] env[63379]: DEBUG oslo_vmware.api [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779928, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.524197] env[63379]: DEBUG oslo_vmware.api [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]527765bf-508e-0f96-17a9-ccf6def8d670, 'name': SearchDatastore_Task, 'duration_secs': 0.012391} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.524820] env[63379]: DEBUG oslo_concurrency.lockutils [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1740.525287] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 19a41941-0679-4971-8a44-c95b13f5c294/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48-rescue.vmdk. {{(pid=63379) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1740.527019] env[63379]: DEBUG nova.compute.manager [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1740.528741] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-371a5256-a7be-4792-b954-8e5350b3017d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.537065] env[63379]: DEBUG oslo_vmware.api [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1740.537065] env[63379]: value = "task-1779930" [ 1740.537065] env[63379]: _type = "Task" [ 1740.537065] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1740.546240] env[63379]: DEBUG oslo_vmware.api [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779930, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.625567] env[63379]: DEBUG oslo_vmware.api [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779929, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.871547] env[63379]: DEBUG nova.scheduler.client.report [None req-cd79a38d-e171-4897-b885-d0d33043bf5d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1740.905913] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cad26875-63ff-4364-a68e-2cdd2ceb19dd tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1740.976706] env[63379]: DEBUG oslo_vmware.api [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779928, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.59546} completed successfully. 
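The inventory reported for provider cf478c89-515f-4372-b90f-4868ab56e978 implies the capacity Placement schedules against; the usual formula is capacity = (total - reserved) * allocation_ratio. A worked example with the numbers from the log (a sketch, not taken from the Nova source):

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    # capacity = (total - reserved) * allocation_ratio
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: {capacity:g}")

# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400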
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.977048] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 8877e0f7-091b-4a91-bb5c-fb7733e5f70c/8877e0f7-091b-4a91-bb5c-fb7733e5f70c.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1740.977290] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1740.977611] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3d0ce47d-6317-4432-b2bb-c13d9285ac40 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.984566] env[63379]: DEBUG oslo_vmware.api [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Waiting for the task: (returnval){ [ 1740.984566] env[63379]: value = "task-1779931" [ 1740.984566] env[63379]: _type = "Task" [ 1740.984566] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1741.047991] env[63379]: DEBUG oslo_vmware.api [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779930, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.126567] env[63379]: DEBUG oslo_vmware.api [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779929, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.376774] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cd79a38d-e171-4897-b885-d0d33043bf5d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.859s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1741.379610] env[63379]: DEBUG oslo_concurrency.lockutils [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.589s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1741.381474] env[63379]: INFO nova.compute.claims [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1741.404406] env[63379]: INFO nova.scheduler.client.report [None req-cd79a38d-e171-4897-b885-d0d33043bf5d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Deleted allocations for instance fad7a2dd-291f-4105-95a6-56bdbcc7acb4 [ 1741.494547] env[63379]: DEBUG oslo_vmware.api [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779931, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.179298} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1741.494869] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1741.495711] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a932306e-4433-4866-a00f-584a95e207df {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.521063] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Reconfiguring VM instance instance-00000054 to attach disk [datastore1] 8877e0f7-091b-4a91-bb5c-fb7733e5f70c/8877e0f7-091b-4a91-bb5c-fb7733e5f70c.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1741.521644] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9e3a3da5-aab7-4da3-b253-341be45f57fa {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.538357] env[63379]: DEBUG nova.compute.manager [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1741.544199] env[63379]: DEBUG oslo_vmware.api [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Waiting for the task: (returnval){ [ 1741.544199] env[63379]: value = "task-1779932" [ 1741.544199] env[63379]: _type = "Task" [ 1741.544199] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1741.551193] env[63379]: DEBUG oslo_vmware.api [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779930, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.707523} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1741.551783] env[63379]: INFO nova.virt.vmwareapi.ds_util [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 19a41941-0679-4971-8a44-c95b13f5c294/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48-rescue.vmdk. 
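The rescue copy above follows the naming convention visible in the log: the cached image <image_id>.vmdk is copied into the instance's own directory as <image_id>-rescue.vmdk. A small sketch that just rebuilds those two datastore paths (the helper is hypothetical; the UUIDs are the ones logged):

def ds_path(datastore, *parts):
    # Hypothetical helper: render a "[datastore] a/b/c" style path.
    return f"[{datastore}] " + "/".join(parts)

image_id = 'd3d2d67c-c3e3-4e1e-9156-0c896c5b3d48'
instance_uuid = '19a41941-0679-4971-8a44-c95b13f5c294'

src = ds_path('datastore1', 'devstack-image-cache_base', image_id, f'{image_id}.vmdk')
dst = ds_path('datastore1', instance_uuid, f'{image_id}-rescue.vmdk')
print(src)
print(dst)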
[ 1741.552754] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b987cd3-aca2-4856-b4f6-8575a80ef6cf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.558137] env[63379]: DEBUG oslo_vmware.api [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779932, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.583610] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Reconfiguring VM instance instance-0000003e to attach disk [datastore1] 19a41941-0679-4971-8a44-c95b13f5c294/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48-rescue.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1741.586110] env[63379]: DEBUG nova.virt.hardware [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1741.586356] env[63379]: DEBUG nova.virt.hardware [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1741.586524] env[63379]: DEBUG nova.virt.hardware [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1741.586718] env[63379]: DEBUG nova.virt.hardware [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1741.586872] env[63379]: DEBUG nova.virt.hardware [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1741.587036] env[63379]: DEBUG 
nova.virt.hardware [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1741.587260] env[63379]: DEBUG nova.virt.hardware [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1741.587428] env[63379]: DEBUG nova.virt.hardware [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1741.587602] env[63379]: DEBUG nova.virt.hardware [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1741.587771] env[63379]: DEBUG nova.virt.hardware [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1741.587956] env[63379]: DEBUG nova.virt.hardware [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1741.588262] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e5e34972-f2b0-4e4c-978e-d8414fdfd115 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.601892] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57ff50a8-6dc1-48e4-a710-4e5c201bdf7d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.611512] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d887cd1-00f2-4ff1-a893-2014366d37e4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.615366] env[63379]: DEBUG oslo_vmware.api [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1741.615366] env[63379]: value = "task-1779933" [ 1741.615366] env[63379]: _type = "Task" [ 1741.615366] env[63379]: } to complete. 
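The hardware lines above show the topology selection for the m1.nano flavor: 1 vCPU under per-axis limits of 65536 leaves exactly one possible topology, 1 socket x 1 core x 1 thread. An illustrative enumeration of valid (sockets, cores, threads) factorizations under such limits (a sketch, not Nova's implementation):

def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    # Enumerate (sockets, cores, threads) with sockets * cores * threads == vcpus
    # and each axis within its limit.
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        rest = vcpus // sockets
        for cores in range(1, min(rest, max_cores) + 1):
            if rest % cores:
                continue
            threads = rest // cores
            if threads <= max_threads:
                topologies.append((sockets, cores, threads))
    return topologies

# m1.nano: 1 vCPU under 65536/65536/65536 limits -> only (1, 1, 1) is possible.
print(possible_topologies(1, 65536, 65536, 65536))
# An 8-vCPU flavor would also admit (1, 8, 1), (2, 2, 2), (8, 1, 1), ...
print(possible_topologies(8, 65536, 65536, 65536))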
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1741.637009] env[63379]: DEBUG oslo_vmware.api [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779929, 'name': PowerOnVM_Task, 'duration_secs': 1.494301} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1741.642062] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1741.642384] env[63379]: INFO nova.compute.manager [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Took 8.97 seconds to spawn the instance on the hypervisor. [ 1741.642629] env[63379]: DEBUG nova.compute.manager [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1741.643068] env[63379]: DEBUG oslo_vmware.api [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779933, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.644204] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-074fdf1a-0d8e-4d53-b98d-f601569cfdbc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.808389] env[63379]: DEBUG nova.compute.manager [req-24b4d70d-3fa3-4ab5-8af9-2a09506ff106 req-29667983-57ed-460e-b0a1-b8a7d878f5df service nova] [instance: eda684fa-1595-4985-beb7-c298049411bf] Received event network-vif-plugged-82d4cc07-9772-4f7e-87ba-1ef653e88fd3 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1741.808389] env[63379]: DEBUG oslo_concurrency.lockutils [req-24b4d70d-3fa3-4ab5-8af9-2a09506ff106 req-29667983-57ed-460e-b0a1-b8a7d878f5df service nova] Acquiring lock "eda684fa-1595-4985-beb7-c298049411bf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1741.808807] env[63379]: DEBUG oslo_concurrency.lockutils [req-24b4d70d-3fa3-4ab5-8af9-2a09506ff106 req-29667983-57ed-460e-b0a1-b8a7d878f5df service nova] Lock "eda684fa-1595-4985-beb7-c298049411bf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1741.809157] env[63379]: DEBUG oslo_concurrency.lockutils [req-24b4d70d-3fa3-4ab5-8af9-2a09506ff106 req-29667983-57ed-460e-b0a1-b8a7d878f5df service nova] Lock "eda684fa-1595-4985-beb7-c298049411bf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1741.809460] env[63379]: DEBUG nova.compute.manager [req-24b4d70d-3fa3-4ab5-8af9-2a09506ff106 req-29667983-57ed-460e-b0a1-b8a7d878f5df service nova] [instance: eda684fa-1595-4985-beb7-c298049411bf] No waiting events found dispatching network-vif-plugged-82d4cc07-9772-4f7e-87ba-1ef653e88fd3 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1741.809780] env[63379]: WARNING nova.compute.manager [req-24b4d70d-3fa3-4ab5-8af9-2a09506ff106 req-29667983-57ed-460e-b0a1-b8a7d878f5df service nova] [instance: eda684fa-1595-4985-beb7-c298049411bf] Received unexpected event network-vif-plugged-82d4cc07-9772-4f7e-87ba-1ef653e88fd3 for instance with vm_state building and task_state spawning. 
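The network-vif-plugged handling above is a pop-or-warn pattern: under the per-instance "-events" lock the manager looks for a thread waiting on that event and, finding none (the instance is still building/spawning), logs the "unexpected event" warning instead. A stand-alone sketch of that pattern with threading (illustrative names only, not the compute manager's real structures):

import threading

class InstanceEvents:
    # Illustrative stand-in for the per-instance event bookkeeping seen in the log.
    def __init__(self):
        self._lock = threading.Lock()   # plays the role of the "<uuid>-events" lock
        self._waiters = {}              # event name -> threading.Event

    def prepare(self, name):
        # A build thread registers interest in an event before it arrives.
        waiter = threading.Event()
        with self._lock:
            self._waiters[name] = waiter
        return waiter

    def pop(self, name):
        # A Neutron notification wakes the waiter, or warns if nobody is waiting.
        with self._lock:
            waiter = self._waiters.pop(name, None)
        if waiter is None:
            print(f"WARNING: received unexpected event {name}; no waiter registered")
        else:
            waiter.set()

events = InstanceEvents()
events.pop('network-vif-plugged-82d4cc07-9772-4f7e-87ba-1ef653e88fd3')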
[ 1741.910414] env[63379]: DEBUG nova.network.neutron [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Successfully updated port: 82d4cc07-9772-4f7e-87ba-1ef653e88fd3 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1741.915689] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cd79a38d-e171-4897-b885-d0d33043bf5d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "fad7a2dd-291f-4105-95a6-56bdbcc7acb4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.124s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1742.055744] env[63379]: DEBUG oslo_vmware.api [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779932, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.125940] env[63379]: DEBUG oslo_vmware.api [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779933, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.164918] env[63379]: INFO nova.compute.manager [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Took 27.94 seconds to build instance. [ 1742.181944] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-22500bcc-cce7-4516-8b3c-1547e93a4064 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Volume attach. 
Driver type: vmdk {{(pid=63379) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1742.182231] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-22500bcc-cce7-4516-8b3c-1547e93a4064 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369451', 'volume_id': '3d2e44ab-54ce-4ed3-b05f-eda61e23e1ef', 'name': 'volume-3d2e44ab-54ce-4ed3-b05f-eda61e23e1ef', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '1d76a28f-822d-4b4f-be2f-2ad3371b3979', 'attached_at': '', 'detached_at': '', 'volume_id': '3d2e44ab-54ce-4ed3-b05f-eda61e23e1ef', 'serial': '3d2e44ab-54ce-4ed3-b05f-eda61e23e1ef'} {{(pid=63379) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1742.183101] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26cd4da5-e0c5-4d8a-b55a-d205f3f2300e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.201146] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b06a8903-81e8-4721-8c57-86ef99466492 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.227495] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-22500bcc-cce7-4516-8b3c-1547e93a4064 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Reconfiguring VM instance instance-00000034 to attach disk [datastore1] volume-3d2e44ab-54ce-4ed3-b05f-eda61e23e1ef/volume-3d2e44ab-54ce-4ed3-b05f-eda61e23e1ef.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1742.227795] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3492b15c-b8e5-48f7-923d-3d21a78ff43b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.246087] env[63379]: DEBUG oslo_vmware.api [None req-22500bcc-cce7-4516-8b3c-1547e93a4064 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Waiting for the task: (returnval){ [ 1742.246087] env[63379]: value = "task-1779934" [ 1742.246087] env[63379]: _type = "Task" [ 1742.246087] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1742.254518] env[63379]: DEBUG oslo_vmware.api [None req-22500bcc-cce7-4516-8b3c-1547e93a4064 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1779934, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.414660] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "refresh_cache-eda684fa-1595-4985-beb7-c298049411bf" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1742.415817] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquired lock "refresh_cache-eda684fa-1595-4985-beb7-c298049411bf" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1742.415817] env[63379]: DEBUG nova.network.neutron [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1742.554888] env[63379]: DEBUG oslo_vmware.api [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779932, 'name': ReconfigVM_Task, 'duration_secs': 0.98524} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1742.555202] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Reconfigured VM instance instance-00000054 to attach disk [datastore1] 8877e0f7-091b-4a91-bb5c-fb7733e5f70c/8877e0f7-091b-4a91-bb5c-fb7733e5f70c.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1742.555872] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-86ae0dda-24ed-48e3-90c0-c1657ae44748 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.563396] env[63379]: DEBUG oslo_vmware.api [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Waiting for the task: (returnval){ [ 1742.563396] env[63379]: value = "task-1779935" [ 1742.563396] env[63379]: _type = "Task" [ 1742.563396] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1742.572799] env[63379]: DEBUG oslo_vmware.api [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779935, 'name': Rename_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.626999] env[63379]: DEBUG oslo_vmware.api [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779933, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.667313] env[63379]: DEBUG oslo_concurrency.lockutils [None req-83fb4278-d6f5-4229-9a85-d5f9fcdecb40 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Lock "b9bc2562-9475-400e-9cf9-646b8f4c8cf2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.451s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1742.724154] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-339a9ad9-ada1-4e99-b3ca-6ecdc7b3702d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.732948] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28e4283d-f87f-4c4b-b5ec-87fd4a32258f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.768419] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-100157da-035e-48e4-8d37-9fcc31586ab9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.777953] env[63379]: DEBUG oslo_vmware.api [None req-22500bcc-cce7-4516-8b3c-1547e93a4064 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1779934, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.781582] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc54bb3c-ed47-452f-830b-98d50448b8d4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.796428] env[63379]: DEBUG nova.compute.provider_tree [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1742.947778] env[63379]: DEBUG nova.network.neutron [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Instance cache missing network info. 
{{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1743.074430] env[63379]: DEBUG oslo_vmware.api [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779935, 'name': Rename_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1743.090261] env[63379]: DEBUG nova.network.neutron [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Updating instance_info_cache with network_info: [{"id": "82d4cc07-9772-4f7e-87ba-1ef653e88fd3", "address": "fa:16:3e:63:27:ae", "network": {"id": "a2c9b802-041e-4679-bfb1-118fd9cd10f3", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-986609966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f28f4532d464e6eb90ab75799990c85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82d4cc07-97", "ovs_interfaceid": "82d4cc07-9772-4f7e-87ba-1ef653e88fd3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1743.129246] env[63379]: DEBUG oslo_vmware.api [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779933, 'name': ReconfigVM_Task, 'duration_secs': 1.035888} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1743.129246] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Reconfigured VM instance instance-0000003e to attach disk [datastore1] 19a41941-0679-4971-8a44-c95b13f5c294/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48-rescue.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1743.129539] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9dfba69-2572-407c-aefa-931ae6b45908 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.158594] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5bb1a051-1b08-4f06-bf8b-48635bc1ffd6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.174241] env[63379]: DEBUG oslo_vmware.api [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1743.174241] env[63379]: value = "task-1779936" [ 1743.174241] env[63379]: _type = "Task" [ 1743.174241] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1743.183702] env[63379]: DEBUG oslo_vmware.api [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779936, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1743.274496] env[63379]: DEBUG oslo_vmware.api [None req-22500bcc-cce7-4516-8b3c-1547e93a4064 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1779934, 'name': ReconfigVM_Task, 'duration_secs': 0.68272} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1743.274793] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-22500bcc-cce7-4516-8b3c-1547e93a4064 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Reconfigured VM instance instance-00000034 to attach disk [datastore1] volume-3d2e44ab-54ce-4ed3-b05f-eda61e23e1ef/volume-3d2e44ab-54ce-4ed3-b05f-eda61e23e1ef.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1743.279531] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-30a752cf-980e-4179-ae6d-726c9ae890fe {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.295420] env[63379]: DEBUG oslo_vmware.api [None req-22500bcc-cce7-4516-8b3c-1547e93a4064 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Waiting for the task: (returnval){ [ 1743.295420] env[63379]: value = "task-1779937" [ 1743.295420] env[63379]: _type = "Task" [ 1743.295420] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1743.305515] env[63379]: DEBUG oslo_vmware.api [None req-22500bcc-cce7-4516-8b3c-1547e93a4064 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1779937, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1743.329836] env[63379]: DEBUG nova.scheduler.client.report [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Updated inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 with generation 108 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1743.330138] env[63379]: DEBUG nova.compute.provider_tree [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Updating resource provider cf478c89-515f-4372-b90f-4868ab56e978 generation from 108 to 109 during operation: update_inventory {{(pid=63379) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1743.330330] env[63379]: DEBUG nova.compute.provider_tree [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1743.574331] env[63379]: DEBUG oslo_vmware.api [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779935, 'name': Rename_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1743.592967] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Releasing lock "refresh_cache-eda684fa-1595-4985-beb7-c298049411bf" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1743.593303] env[63379]: DEBUG nova.compute.manager [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Instance network_info: |[{"id": "82d4cc07-9772-4f7e-87ba-1ef653e88fd3", "address": "fa:16:3e:63:27:ae", "network": {"id": "a2c9b802-041e-4679-bfb1-118fd9cd10f3", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-986609966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f28f4532d464e6eb90ab75799990c85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82d4cc07-97", "ovs_interfaceid": "82d4cc07-9772-4f7e-87ba-1ef653e88fd3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1743.593723] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:63:27:ae', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8f441782-e89c-4815-b53e-af83c5d27902', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '82d4cc07-9772-4f7e-87ba-1ef653e88fd3', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1743.601360] env[63379]: DEBUG oslo.service.loopingcall [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1743.601566] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eda684fa-1595-4985-beb7-c298049411bf] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1743.601787] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2d31d135-b22b-442f-8dd0-9d2ac6cddeba {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.624816] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1743.624816] env[63379]: value = "task-1779938" [ 1743.624816] env[63379]: _type = "Task" [ 1743.624816] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1743.633047] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779938, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1743.685016] env[63379]: DEBUG oslo_vmware.api [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779936, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1743.792172] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Didn't find any instances for network info cache update. {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10060}} [ 1743.792631] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1743.792909] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1743.793252] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1743.793473] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1743.793667] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1743.793950] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1743.794171] 
env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63379) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10593}} [ 1743.794465] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager.update_available_resource {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1743.808137] env[63379]: DEBUG oslo_vmware.api [None req-22500bcc-cce7-4516-8b3c-1547e93a4064 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1779937, 'name': ReconfigVM_Task, 'duration_secs': 0.206387} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1743.809510] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-22500bcc-cce7-4516-8b3c-1547e93a4064 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369451', 'volume_id': '3d2e44ab-54ce-4ed3-b05f-eda61e23e1ef', 'name': 'volume-3d2e44ab-54ce-4ed3-b05f-eda61e23e1ef', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '1d76a28f-822d-4b4f-be2f-2ad3371b3979', 'attached_at': '', 'detached_at': '', 'volume_id': '3d2e44ab-54ce-4ed3-b05f-eda61e23e1ef', 'serial': '3d2e44ab-54ce-4ed3-b05f-eda61e23e1ef'} {{(pid=63379) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1743.836894] env[63379]: DEBUG oslo_concurrency.lockutils [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.457s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1743.837752] env[63379]: DEBUG nova.compute.manager [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1743.843049] env[63379]: DEBUG nova.compute.manager [req-e3e8e28c-5e3f-4c68-adc9-653994c187e7 req-16806aae-7e30-4e0f-8a16-85b7d1d75124 service nova] [instance: eda684fa-1595-4985-beb7-c298049411bf] Received event network-changed-82d4cc07-9772-4f7e-87ba-1ef653e88fd3 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1743.843369] env[63379]: DEBUG nova.compute.manager [req-e3e8e28c-5e3f-4c68-adc9-653994c187e7 req-16806aae-7e30-4e0f-8a16-85b7d1d75124 service nova] [instance: eda684fa-1595-4985-beb7-c298049411bf] Refreshing instance network info cache due to event network-changed-82d4cc07-9772-4f7e-87ba-1ef653e88fd3. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1743.843679] env[63379]: DEBUG oslo_concurrency.lockutils [req-e3e8e28c-5e3f-4c68-adc9-653994c187e7 req-16806aae-7e30-4e0f-8a16-85b7d1d75124 service nova] Acquiring lock "refresh_cache-eda684fa-1595-4985-beb7-c298049411bf" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1743.843954] env[63379]: DEBUG oslo_concurrency.lockutils [req-e3e8e28c-5e3f-4c68-adc9-653994c187e7 req-16806aae-7e30-4e0f-8a16-85b7d1d75124 service nova] Acquired lock "refresh_cache-eda684fa-1595-4985-beb7-c298049411bf" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1743.844250] env[63379]: DEBUG nova.network.neutron [req-e3e8e28c-5e3f-4c68-adc9-653994c187e7 req-16806aae-7e30-4e0f-8a16-85b7d1d75124 service nova] [instance: eda684fa-1595-4985-beb7-c298049411bf] Refreshing network info cache for port 82d4cc07-9772-4f7e-87ba-1ef653e88fd3 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1743.845846] env[63379]: DEBUG oslo_concurrency.lockutils [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.497s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1743.847490] env[63379]: INFO nova.compute.claims [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1744.075429] env[63379]: DEBUG oslo_vmware.api [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779935, 'name': Rename_Task, 'duration_secs': 1.214936} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1744.075922] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1744.075999] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0ff82d4a-c0b0-4b37-a882-9ed749a709c0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.082844] env[63379]: DEBUG oslo_vmware.api [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Waiting for the task: (returnval){ [ 1744.082844] env[63379]: value = "task-1779939" [ 1744.082844] env[63379]: _type = "Task" [ 1744.082844] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1744.094131] env[63379]: DEBUG oslo_vmware.api [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779939, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.136011] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779938, 'name': CreateVM_Task, 'duration_secs': 0.367597} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1744.136241] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eda684fa-1595-4985-beb7-c298049411bf] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1744.137037] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1744.137248] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1744.137630] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1744.137923] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-877e64b8-240b-4cd5-bc0d-e53500fff3e1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.143579] env[63379]: DEBUG oslo_vmware.api [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1744.143579] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52dc2082-63fc-cf0e-38cb-6ef94ceee210" [ 1744.143579] env[63379]: _type = "Task" [ 1744.143579] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1744.152543] env[63379]: DEBUG oslo_vmware.api [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52dc2082-63fc-cf0e-38cb-6ef94ceee210, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.189024] env[63379]: DEBUG oslo_vmware.api [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779936, 'name': ReconfigVM_Task, 'duration_secs': 0.770956} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1744.189024] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1744.189024] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a66e037d-55b0-460c-a72b-2a51361ca128 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.196087] env[63379]: DEBUG oslo_vmware.api [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1744.196087] env[63379]: value = "task-1779940" [ 1744.196087] env[63379]: _type = "Task" [ 1744.196087] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1744.204866] env[63379]: DEBUG oslo_vmware.api [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779940, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.303287] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.347812] env[63379]: DEBUG nova.compute.utils [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1744.349487] env[63379]: DEBUG nova.compute.manager [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1744.349731] env[63379]: DEBUG nova.network.neutron [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1744.446214] env[63379]: DEBUG nova.policy [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '72e988c5c3c549ed87ee7f48a55097c9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '02da0ce83a89450cb5011c53056cfa0b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1744.590132] env[63379]: DEBUG nova.network.neutron [req-e3e8e28c-5e3f-4c68-adc9-653994c187e7 req-16806aae-7e30-4e0f-8a16-85b7d1d75124 service nova] [instance: eda684fa-1595-4985-beb7-c298049411bf] Updated VIF entry in instance network info cache for port 82d4cc07-9772-4f7e-87ba-1ef653e88fd3. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1744.590557] env[63379]: DEBUG nova.network.neutron [req-e3e8e28c-5e3f-4c68-adc9-653994c187e7 req-16806aae-7e30-4e0f-8a16-85b7d1d75124 service nova] [instance: eda684fa-1595-4985-beb7-c298049411bf] Updating instance_info_cache with network_info: [{"id": "82d4cc07-9772-4f7e-87ba-1ef653e88fd3", "address": "fa:16:3e:63:27:ae", "network": {"id": "a2c9b802-041e-4679-bfb1-118fd9cd10f3", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-986609966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f28f4532d464e6eb90ab75799990c85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82d4cc07-97", "ovs_interfaceid": "82d4cc07-9772-4f7e-87ba-1ef653e88fd3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1744.597989] env[63379]: DEBUG oslo_vmware.api [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779939, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.655530] env[63379]: DEBUG oslo_vmware.api [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52dc2082-63fc-cf0e-38cb-6ef94ceee210, 'name': SearchDatastore_Task, 'duration_secs': 0.011101} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1744.655872] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1744.656148] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1744.656407] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1744.656594] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1744.656786] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1744.657087] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1d8f4088-3b8e-460e-91e0-5b3a37c4bfd6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.667967] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1744.668298] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1744.668994] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a28357db-0387-43fb-8628-098d642f4cea {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.674690] env[63379]: DEBUG oslo_vmware.api [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1744.674690] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52795add-786c-3f80-acec-ba97120c06ab" [ 1744.674690] env[63379]: _type = "Task" [ 1744.674690] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1744.683646] env[63379]: DEBUG oslo_vmware.api [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52795add-786c-3f80-acec-ba97120c06ab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.706674] env[63379]: DEBUG oslo_vmware.api [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779940, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.763174] env[63379]: DEBUG nova.network.neutron [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Successfully created port: 3f904953-8743-428b-8084-4936ee47a1df {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1744.855388] env[63379]: DEBUG nova.compute.manager [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1744.861840] env[63379]: DEBUG nova.objects.instance [None req-22500bcc-cce7-4516-8b3c-1547e93a4064 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lazy-loading 'flavor' on Instance uuid 1d76a28f-822d-4b4f-be2f-2ad3371b3979 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1745.095579] env[63379]: DEBUG oslo_concurrency.lockutils [req-e3e8e28c-5e3f-4c68-adc9-653994c187e7 req-16806aae-7e30-4e0f-8a16-85b7d1d75124 service nova] Releasing lock "refresh_cache-eda684fa-1595-4985-beb7-c298049411bf" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1745.096024] env[63379]: DEBUG oslo_vmware.api [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779939, 'name': PowerOnVM_Task, 'duration_secs': 0.587952} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1745.096615] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1745.096835] env[63379]: INFO nova.compute.manager [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Took 10.00 seconds to spawn the instance on the hypervisor. [ 1745.097031] env[63379]: DEBUG nova.compute.manager [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1745.097920] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2041ad7b-12d3-437d-80e5-05a5f65b7179 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.174648] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b670122-b736-4030-bfd6-e714e989a52c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.190083] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-485fc3e9-2b31-4769-8c3c-44dfedb072dc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.193242] env[63379]: DEBUG oslo_vmware.api [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52795add-786c-3f80-acec-ba97120c06ab, 'name': SearchDatastore_Task, 'duration_secs': 0.014009} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1745.194308] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a277b24-1b06-46d9-a02c-90309ec4daa0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.224052] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78d2da01-0b44-4fbc-89cc-0d39a935c490 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.227769] env[63379]: DEBUG oslo_vmware.api [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1745.227769] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52fc0b19-7423-0005-8da8-a442d799a19e" [ 1745.227769] env[63379]: _type = "Task" [ 1745.227769] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1745.237061] env[63379]: DEBUG oslo_vmware.api [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779940, 'name': PowerOnVM_Task, 'duration_secs': 0.747045} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1745.237722] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1745.240335] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cda0060-2dd6-4ce4-ade4-a8742d87a71d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.245338] env[63379]: DEBUG nova.compute.manager [None req-20af2004-93c7-4d6b-972e-6cfd34b23ff2 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1745.249410] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8e364cf-6d62-4255-877c-8bad7faf540a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.251765] env[63379]: DEBUG oslo_vmware.api [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52fc0b19-7423-0005-8da8-a442d799a19e, 'name': SearchDatastore_Task, 'duration_secs': 0.015344} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1745.252345] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1745.252597] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] eda684fa-1595-4985-beb7-c298049411bf/eda684fa-1595-4985-beb7-c298049411bf.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1745.253524] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4434eebf-f156-4a28-9b99-e32f62293833 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.263158] env[63379]: DEBUG nova.compute.provider_tree [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1745.271292] env[63379]: DEBUG oslo_vmware.api [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1745.271292] env[63379]: value = "task-1779941" [ 1745.271292] env[63379]: _type = "Task" [ 1745.271292] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1745.280841] env[63379]: DEBUG oslo_vmware.api [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779941, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.368052] env[63379]: DEBUG oslo_concurrency.lockutils [None req-22500bcc-cce7-4516-8b3c-1547e93a4064 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lock "1d76a28f-822d-4b4f-be2f-2ad3371b3979" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.302s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1745.518258] env[63379]: DEBUG oslo_concurrency.lockutils [None req-74cccc87-4757-4507-a3bd-896a178df732 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Acquiring lock "1d76a28f-822d-4b4f-be2f-2ad3371b3979" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1745.518258] env[63379]: DEBUG oslo_concurrency.lockutils [None req-74cccc87-4757-4507-a3bd-896a178df732 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lock "1d76a28f-822d-4b4f-be2f-2ad3371b3979" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1745.518258] env[63379]: DEBUG nova.compute.manager [None req-74cccc87-4757-4507-a3bd-896a178df732 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1745.518258] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d88650a-b969-4874-8292-17a65194c443 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.528304] env[63379]: DEBUG nova.compute.manager [None req-74cccc87-4757-4507-a3bd-896a178df732 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63379) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1745.528930] env[63379]: DEBUG nova.objects.instance [None req-74cccc87-4757-4507-a3bd-896a178df732 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lazy-loading 'flavor' on Instance uuid 1d76a28f-822d-4b4f-be2f-2ad3371b3979 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1745.621211] env[63379]: INFO nova.compute.manager [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Took 31.07 seconds to build instance. 
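The lock records above ("Acquiring lock ... by 'nova.compute.manager.ComputeManager.stop_instance..do_stop_instance'", "acquired ... waited 0.001s", and the earlier '"released" ... held 8.302s') are emitted by the oslo.concurrency lockutils wrapper around an inner do_* function. Below is a minimal sketch of that pattern with a placeholder function body; Nova reaches lockutils through its own synchronized helper, so this only illustrates the mechanism, not Nova's actual compute-manager code.

# Minimal sketch of the per-instance lock pattern behind the
# "Acquiring lock ... / acquired ... waited Ns / released ... held Ns" lines.
# The UUID is copied from the log; the body of do_stop_instance is a placeholder.
from oslo_concurrency import lockutils

INSTANCE_UUID = "1d76a28f-822d-4b4f-be2f-2ad3371b3979"

def stop_instance(instance_uuid):
    # The decorator's inner wrapper is what logs the acquire/release records
    # seen above (lockutils.py inner, with the waited/held timings).
    @lockutils.synchronized(instance_uuid)
    def do_stop_instance():
        # power off the VM, update the instance record, etc. (placeholder)
        print("holding per-instance lock for %s" % instance_uuid)

    do_stop_instance()

stop_instance(INSTANCE_UUID)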
[ 1745.766935] env[63379]: DEBUG nova.scheduler.client.report [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1745.784835] env[63379]: DEBUG oslo_vmware.api [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779941, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.865534] env[63379]: DEBUG nova.compute.manager [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1745.896133] env[63379]: DEBUG nova.virt.hardware [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1745.896413] env[63379]: DEBUG nova.virt.hardware [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1745.897131] env[63379]: DEBUG nova.virt.hardware [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1745.897391] env[63379]: DEBUG nova.virt.hardware [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1745.897629] env[63379]: DEBUG nova.virt.hardware [None 
req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1745.897749] env[63379]: DEBUG nova.virt.hardware [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1745.897993] env[63379]: DEBUG nova.virt.hardware [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1745.898207] env[63379]: DEBUG nova.virt.hardware [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1745.898399] env[63379]: DEBUG nova.virt.hardware [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1745.898573] env[63379]: DEBUG nova.virt.hardware [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1745.898763] env[63379]: DEBUG nova.virt.hardware [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1745.899689] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be1da1e9-26d9-4800-8d99-85fcbcc0f179 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.911573] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38ee43ec-3797-4dec-9d57-22e70bdb2be4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.034086] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-74cccc87-4757-4507-a3bd-896a178df732 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1746.034300] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-42ff67a8-39e5-47f7-987f-8a73de0a5810 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.042356] env[63379]: DEBUG oslo_vmware.api [None 
req-74cccc87-4757-4507-a3bd-896a178df732 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Waiting for the task: (returnval){ [ 1746.042356] env[63379]: value = "task-1779942" [ 1746.042356] env[63379]: _type = "Task" [ 1746.042356] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1746.050455] env[63379]: DEBUG oslo_vmware.api [None req-74cccc87-4757-4507-a3bd-896a178df732 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1779942, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1746.123297] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1be356c1-7518-461d-bd28-de29935ea524 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Lock "8877e0f7-091b-4a91-bb5c-fb7733e5f70c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.579s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1746.197997] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c0e7e230-1db8-4f77-83c1-9b2df8011afd tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "interface-f983d089-7cfc-46a5-8f8d-f49f67aef1da-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1746.197997] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c0e7e230-1db8-4f77-83c1-9b2df8011afd tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "interface-f983d089-7cfc-46a5-8f8d-f49f67aef1da-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1746.198307] env[63379]: DEBUG nova.objects.instance [None req-c0e7e230-1db8-4f77-83c1-9b2df8011afd tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lazy-loading 'flavor' on Instance uuid f983d089-7cfc-46a5-8f8d-f49f67aef1da {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1746.281034] env[63379]: DEBUG oslo_concurrency.lockutils [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.433s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1746.281034] env[63379]: DEBUG nova.compute.manager [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1746.284345] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.375s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1746.286689] env[63379]: INFO nova.compute.claims [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1746.298342] env[63379]: DEBUG oslo_vmware.api [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779941, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.663162} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1746.301190] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] eda684fa-1595-4985-beb7-c298049411bf/eda684fa-1595-4985-beb7-c298049411bf.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1746.301190] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1746.301190] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-42d7a0d7-b65f-44c1-812d-b0716120136e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.310162] env[63379]: DEBUG oslo_vmware.api [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1746.310162] env[63379]: value = "task-1779943" [ 1746.310162] env[63379]: _type = "Task" [ 1746.310162] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1746.325540] env[63379]: DEBUG oslo_vmware.api [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779943, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1746.408616] env[63379]: INFO nova.compute.manager [None req-de571fec-bc51-44ba-92b4-20be3a98b5f3 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Unrescuing [ 1746.409055] env[63379]: DEBUG oslo_concurrency.lockutils [None req-de571fec-bc51-44ba-92b4-20be3a98b5f3 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquiring lock "refresh_cache-19a41941-0679-4971-8a44-c95b13f5c294" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1746.409319] env[63379]: DEBUG oslo_concurrency.lockutils [None req-de571fec-bc51-44ba-92b4-20be3a98b5f3 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquired lock "refresh_cache-19a41941-0679-4971-8a44-c95b13f5c294" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1746.409576] env[63379]: DEBUG nova.network.neutron [None req-de571fec-bc51-44ba-92b4-20be3a98b5f3 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1746.553613] env[63379]: DEBUG oslo_vmware.api [None req-74cccc87-4757-4507-a3bd-896a178df732 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1779942, 'name': PowerOffVM_Task, 'duration_secs': 0.32157} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1746.553613] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-74cccc87-4757-4507-a3bd-896a178df732 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1746.553836] env[63379]: DEBUG nova.compute.manager [None req-74cccc87-4757-4507-a3bd-896a178df732 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1746.554551] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3f65d67-f02d-44b3-8c11-a6ea320a9453 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.583758] env[63379]: DEBUG nova.network.neutron [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Successfully updated port: 3f904953-8743-428b-8084-4936ee47a1df {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1746.765295] env[63379]: DEBUG nova.compute.manager [req-b01c08f6-cd01-472c-ab0b-dc0dbaafae0d req-7fdabc05-3af3-4a7b-8e65-8f31f3d38ff2 service nova] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Received event network-vif-plugged-3f904953-8743-428b-8084-4936ee47a1df {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1746.765528] env[63379]: DEBUG oslo_concurrency.lockutils [req-b01c08f6-cd01-472c-ab0b-dc0dbaafae0d req-7fdabc05-3af3-4a7b-8e65-8f31f3d38ff2 service nova] Acquiring lock "3e875e92-673c-4cfa-86ce-fc270ae03e94-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1746.765744] env[63379]: DEBUG oslo_concurrency.lockutils [req-b01c08f6-cd01-472c-ab0b-dc0dbaafae0d req-7fdabc05-3af3-4a7b-8e65-8f31f3d38ff2 service nova] Lock "3e875e92-673c-4cfa-86ce-fc270ae03e94-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1746.765940] env[63379]: DEBUG oslo_concurrency.lockutils [req-b01c08f6-cd01-472c-ab0b-dc0dbaafae0d req-7fdabc05-3af3-4a7b-8e65-8f31f3d38ff2 service nova] Lock "3e875e92-673c-4cfa-86ce-fc270ae03e94-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1746.766381] env[63379]: DEBUG nova.compute.manager [req-b01c08f6-cd01-472c-ab0b-dc0dbaafae0d req-7fdabc05-3af3-4a7b-8e65-8f31f3d38ff2 service nova] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] No waiting events found dispatching network-vif-plugged-3f904953-8743-428b-8084-4936ee47a1df {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1746.766673] env[63379]: WARNING nova.compute.manager [req-b01c08f6-cd01-472c-ab0b-dc0dbaafae0d req-7fdabc05-3af3-4a7b-8e65-8f31f3d38ff2 service nova] 
[instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Received unexpected event network-vif-plugged-3f904953-8743-428b-8084-4936ee47a1df for instance with vm_state building and task_state spawning. [ 1746.785746] env[63379]: DEBUG nova.compute.utils [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1746.787936] env[63379]: DEBUG nova.objects.instance [None req-c0e7e230-1db8-4f77-83c1-9b2df8011afd tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lazy-loading 'pci_requests' on Instance uuid f983d089-7cfc-46a5-8f8d-f49f67aef1da {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1746.789908] env[63379]: DEBUG nova.compute.manager [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1746.790104] env[63379]: DEBUG nova.network.neutron [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1746.818595] env[63379]: DEBUG oslo_vmware.api [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779943, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069693} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1746.821023] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1746.821023] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51d0b50a-dee5-4a77-91d3-fd09903518e1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.842372] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Reconfiguring VM instance instance-00000055 to attach disk [datastore1] eda684fa-1595-4985-beb7-c298049411bf/eda684fa-1595-4985-beb7-c298049411bf.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1746.842919] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f1bd393e-0dbc-498c-923d-606eb480051a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.862846] env[63379]: DEBUG oslo_vmware.api [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1746.862846] env[63379]: value = "task-1779944" [ 1746.862846] env[63379]: _type = "Task" [ 1746.862846] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1746.867426] env[63379]: DEBUG nova.policy [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6ff7e397cc6244978364b58bea6386a0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2aec447aaec8409e9e3751d68c0106df', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1746.873450] env[63379]: DEBUG oslo_vmware.api [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779944, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1747.069849] env[63379]: DEBUG oslo_concurrency.lockutils [None req-74cccc87-4757-4507-a3bd-896a178df732 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lock "1d76a28f-822d-4b4f-be2f-2ad3371b3979" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.553s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1747.086929] env[63379]: DEBUG oslo_concurrency.lockutils [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Acquiring lock "refresh_cache-3e875e92-673c-4cfa-86ce-fc270ae03e94" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1747.086929] env[63379]: DEBUG oslo_concurrency.lockutils [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Acquired lock "refresh_cache-3e875e92-673c-4cfa-86ce-fc270ae03e94" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1747.087115] env[63379]: DEBUG nova.network.neutron [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1747.164264] env[63379]: DEBUG nova.network.neutron [None req-de571fec-bc51-44ba-92b4-20be3a98b5f3 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Updating instance_info_cache with network_info: [{"id": "2d279162-72d1-4378-b83d-c80b2815f680", "address": "fa:16:3e:8c:45:f4", "network": {"id": "3a5c4f8e-5c7c-4623-90f8-f1b83e5b35f8", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-709139332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce15a519ec5744feb0731439b2534fc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d279162-72", "ovs_interfaceid": "2d279162-72d1-4378-b83d-c80b2815f680", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1747.285922] env[63379]: DEBUG nova.network.neutron [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] [instance: 
ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Successfully created port: 1ec6781c-1db3-427d-be1c-37534196f2f4 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1747.291362] env[63379]: DEBUG nova.objects.base [None req-c0e7e230-1db8-4f77-83c1-9b2df8011afd tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=63379) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1747.291657] env[63379]: DEBUG nova.network.neutron [None req-c0e7e230-1db8-4f77-83c1-9b2df8011afd tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1747.293518] env[63379]: DEBUG nova.compute.manager [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1747.345852] env[63379]: DEBUG nova.policy [None req-c0e7e230-1db8-4f77-83c1-9b2df8011afd tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5cbf26808a73470898829b58491e7c6f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'edb0d4b37a67492f9e0275b341e80cc2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1747.374011] env[63379]: DEBUG oslo_vmware.api [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779944, 'name': ReconfigVM_Task, 'duration_secs': 0.342621} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1747.374011] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Reconfigured VM instance instance-00000055 to attach disk [datastore1] eda684fa-1595-4985-beb7-c298049411bf/eda684fa-1595-4985-beb7-c298049411bf.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1747.374596] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-94b155c8-ed64-4f11-8d56-960bea797e9e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.384814] env[63379]: DEBUG oslo_vmware.api [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1747.384814] env[63379]: value = "task-1779945" [ 1747.384814] env[63379]: _type = "Task" [ 1747.384814] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1747.395437] env[63379]: DEBUG oslo_vmware.api [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779945, 'name': Rename_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1747.633589] env[63379]: DEBUG nova.network.neutron [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Instance cache missing network info. 
{{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1747.639879] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65ea8fe2-8444-4d79-b28d-5dd09f903279 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.648452] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4cfef4f-5961-440f-885f-8ce8610de8f7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.683682] env[63379]: DEBUG oslo_concurrency.lockutils [None req-de571fec-bc51-44ba-92b4-20be3a98b5f3 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Releasing lock "refresh_cache-19a41941-0679-4971-8a44-c95b13f5c294" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1747.686052] env[63379]: DEBUG nova.objects.instance [None req-de571fec-bc51-44ba-92b4-20be3a98b5f3 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Lazy-loading 'flavor' on Instance uuid 19a41941-0679-4971-8a44-c95b13f5c294 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1747.688323] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8fa1066-15ea-4935-8064-bc90be349e82 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.697367] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a13855d6-5e2c-44ef-a8d9-0b847eacd8b0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.715618] env[63379]: DEBUG nova.compute.provider_tree [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1747.779848] env[63379]: DEBUG nova.network.neutron [None req-c0e7e230-1db8-4f77-83c1-9b2df8011afd tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Successfully created port: d06b25f4-72d4-4f91-82df-8ff330c8ddc2 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1747.886386] env[63379]: DEBUG nova.network.neutron [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Updating instance_info_cache with network_info: [{"id": "3f904953-8743-428b-8084-4936ee47a1df", "address": "fa:16:3e:93:9d:90", "network": {"id": "b2ae020c-af72-45bd-92f6-89f9a2f1b430", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-536412963-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], 
"meta": {"injected": false, "tenant_id": "02da0ce83a89450cb5011c53056cfa0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1880df72-582c-44cb-992d-88dc6a514914", "external-id": "nsx-vlan-transportzone-808", "segmentation_id": 808, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f904953-87", "ovs_interfaceid": "3f904953-8743-428b-8084-4936ee47a1df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1747.902011] env[63379]: DEBUG oslo_vmware.api [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779945, 'name': Rename_Task, 'duration_secs': 0.181815} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1747.902315] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1747.902573] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9143c24d-ba92-4a44-aaed-e3b9913a5333 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.910753] env[63379]: DEBUG oslo_vmware.api [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1747.910753] env[63379]: value = "task-1779946" [ 1747.910753] env[63379]: _type = "Task" [ 1747.910753] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1747.921759] env[63379]: DEBUG oslo_vmware.api [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779946, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1748.195810] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14103661-1516-4e97-9e21-46e0c16df33a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.222855] env[63379]: DEBUG nova.scheduler.client.report [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1748.226334] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-de571fec-bc51-44ba-92b4-20be3a98b5f3 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1748.226961] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a14ba4d2-e3f3-4323-b61d-057153956f3b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.234632] env[63379]: DEBUG oslo_vmware.api [None req-de571fec-bc51-44ba-92b4-20be3a98b5f3 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1748.234632] env[63379]: value = "task-1779947" [ 1748.234632] env[63379]: _type = "Task" [ 1748.234632] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1748.244110] env[63379]: DEBUG oslo_vmware.api [None req-de571fec-bc51-44ba-92b4-20be3a98b5f3 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779947, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1748.308449] env[63379]: DEBUG nova.compute.manager [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1748.339316] env[63379]: DEBUG nova.virt.hardware [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1748.339695] env[63379]: DEBUG nova.virt.hardware [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1748.340206] env[63379]: DEBUG nova.virt.hardware [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1748.340404] env[63379]: DEBUG nova.virt.hardware [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1748.340671] env[63379]: DEBUG nova.virt.hardware [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1748.340908] env[63379]: DEBUG nova.virt.hardware [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1748.341539] env[63379]: DEBUG nova.virt.hardware [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1748.341656] env[63379]: DEBUG nova.virt.hardware [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1748.341889] env[63379]: DEBUG nova.virt.hardware [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1748.342183] env[63379]: DEBUG nova.virt.hardware [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1748.342631] env[63379]: DEBUG nova.virt.hardware [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1748.343564] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7da145a0-06c6-4863-b523-6dfefc54a727 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.353675] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-136a536e-ccf6-45c2-90ce-1ed7d5030052 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.394810] env[63379]: DEBUG oslo_concurrency.lockutils [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Releasing lock "refresh_cache-3e875e92-673c-4cfa-86ce-fc270ae03e94" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1748.395292] env[63379]: DEBUG nova.compute.manager [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Instance network_info: |[{"id": "3f904953-8743-428b-8084-4936ee47a1df", "address": "fa:16:3e:93:9d:90", "network": {"id": "b2ae020c-af72-45bd-92f6-89f9a2f1b430", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-536412963-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "02da0ce83a89450cb5011c53056cfa0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1880df72-582c-44cb-992d-88dc6a514914", "external-id": "nsx-vlan-transportzone-808", "segmentation_id": 808, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f904953-87", "ovs_interfaceid": "3f904953-8743-428b-8084-4936ee47a1df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1997}} [ 1748.395884] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:93:9d:90', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1880df72-582c-44cb-992d-88dc6a514914', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3f904953-8743-428b-8084-4936ee47a1df', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1748.405099] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Creating folder: Project (02da0ce83a89450cb5011c53056cfa0b). Parent ref: group-v369214. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1748.405504] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-00fe2214-3964-4009-a2f8-6b03c16ce7ba {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.416981] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Created folder: Project (02da0ce83a89450cb5011c53056cfa0b) in parent group-v369214. [ 1748.417228] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Creating folder: Instances. Parent ref: group-v369453. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1748.417469] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e868720f-3bec-40ef-b783-53dbacb6767b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.423728] env[63379]: DEBUG oslo_vmware.api [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779946, 'name': PowerOnVM_Task} progress is 87%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1748.431275] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Created folder: Instances in parent group-v369453. [ 1748.431640] env[63379]: DEBUG oslo.service.loopingcall [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1748.431798] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1748.432476] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7001214b-e505-40a6-b6e0-cb3a2dd48488 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.453487] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1748.453487] env[63379]: value = "task-1779950" [ 1748.453487] env[63379]: _type = "Task" [ 1748.453487] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1748.462013] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779950, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1748.728025] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.444s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1748.729275] env[63379]: DEBUG nova.compute.manager [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1748.733025] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cad26875-63ff-4364-a68e-2cdd2ceb19dd tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.827s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1748.733025] env[63379]: DEBUG nova.objects.instance [None req-cad26875-63ff-4364-a68e-2cdd2ceb19dd tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Lazy-loading 'resources' on Instance uuid 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1748.746278] env[63379]: DEBUG oslo_vmware.api [None req-de571fec-bc51-44ba-92b4-20be3a98b5f3 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779947, 'name': PowerOffVM_Task, 'duration_secs': 0.421115} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1748.746850] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-de571fec-bc51-44ba-92b4-20be3a98b5f3 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1748.751882] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-de571fec-bc51-44ba-92b4-20be3a98b5f3 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Reconfiguring VM instance instance-0000003e to detach disk 2002 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1748.752443] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b6f6b1e5-4de4-43ad-a405-6ee13e499218 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.772446] env[63379]: DEBUG oslo_vmware.api [None req-de571fec-bc51-44ba-92b4-20be3a98b5f3 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1748.772446] env[63379]: value = "task-1779951" [ 1748.772446] env[63379]: _type = "Task" [ 1748.772446] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1748.784024] env[63379]: DEBUG oslo_vmware.api [None req-de571fec-bc51-44ba-92b4-20be3a98b5f3 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779951, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1748.801357] env[63379]: DEBUG nova.objects.instance [None req-4487d2ec-60aa-4558-ae80-2f0d7919e8ee tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lazy-loading 'flavor' on Instance uuid 1d76a28f-822d-4b4f-be2f-2ad3371b3979 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1748.921305] env[63379]: DEBUG oslo_vmware.api [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779946, 'name': PowerOnVM_Task, 'duration_secs': 0.62577} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1748.921543] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1748.921734] env[63379]: INFO nova.compute.manager [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Took 7.38 seconds to spawn the instance on the hypervisor. 
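The entries in this stretch of the trace show the recurring vCenter task-polling pattern: the driver invokes an asynchronous operation (CreateVM_Task, PowerOnVM_Task, ReconfigVM_Task, Destroy_Task, ...), receives a task handle such as task-1779950, and then repeatedly logs "progress is N%" until the task "completed successfully". The sketch below illustrates that poll-until-done loop in minimal form; it is not oslo.vmware's wait_for_task implementation, and poll_task, the get_state callback, fake_state, and the interval/timeout values are assumptions made purely for the example.

    import time

    # Illustrative sketch of the poll-until-done pattern visible in the log
    # (e.g. CreateVM_Task progress 0% -> 25% -> completed successfully).
    # `get_state` is a hypothetical callable returning (state, progress);
    # it stands in for querying the real vCenter task object.
    def poll_task(task_id, get_state, interval=0.5, timeout=300):
        """Poll a task until it succeeds, fails, or times out."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, progress = get_state(task_id)
            print(f"Task {task_id} state={state} progress={progress}%")
            if state == "success":
                return True
            if state == "error":
                raise RuntimeError(f"Task {task_id} failed")
            time.sleep(interval)
        raise TimeoutError(f"Task {task_id} did not complete in {timeout}s")

    if __name__ == "__main__":
        # Fake backend that reports success after a few polls, for demonstration.
        calls = {"n": 0}

        def fake_state(task_id):
            calls["n"] += 1
            if calls["n"] >= 3:
                return ("success", 100)
            return ("running", 25 * calls["n"])

        poll_task("task-1779950", fake_state, interval=0.01)

The real service interleaves many such polls from different request contexts in one process, which is why progress lines for several task IDs alternate in the entries that follow.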
[ 1748.921919] env[63379]: DEBUG nova.compute.manager [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1748.922693] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9edd4f6a-9dc4-420f-9eaf-480b731dc202 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.965753] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779950, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1749.040020] env[63379]: DEBUG oslo_vmware.rw_handles [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525b72c3-394c-439c-38d9-a03f0920dcce/disk-0.vmdk. {{(pid=63379) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1749.040341] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdac2d3d-6b6d-41ed-99c2-f4926b6fb5a1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.049145] env[63379]: DEBUG oslo_vmware.rw_handles [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525b72c3-394c-439c-38d9-a03f0920dcce/disk-0.vmdk is in state: ready. {{(pid=63379) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1749.049145] env[63379]: ERROR oslo_vmware.rw_handles [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525b72c3-394c-439c-38d9-a03f0920dcce/disk-0.vmdk due to incomplete transfer. [ 1749.049145] env[63379]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-f516a061-6fa8-4dd0-a280-ec06d8534406 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.058221] env[63379]: DEBUG oslo_vmware.rw_handles [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525b72c3-394c-439c-38d9-a03f0920dcce/disk-0.vmdk. 
{{(pid=63379) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1749.058679] env[63379]: DEBUG nova.virt.vmwareapi.images [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Uploaded image efafc8d3-6d68-4162-8ee3-9d41c7ec3367 to the Glance image server {{(pid=63379) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1749.062161] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Destroying the VM {{(pid=63379) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1749.062161] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-0282cdb4-4658-4b64-822f-8b6f566450be {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.068085] env[63379]: DEBUG oslo_vmware.api [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1749.068085] env[63379]: value = "task-1779952" [ 1749.068085] env[63379]: _type = "Task" [ 1749.068085] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1749.077346] env[63379]: DEBUG oslo_vmware.api [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779952, 'name': Destroy_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1749.238638] env[63379]: DEBUG nova.compute.utils [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1749.244154] env[63379]: DEBUG nova.compute.manager [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1749.244534] env[63379]: DEBUG nova.network.neutron [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1749.284478] env[63379]: DEBUG oslo_vmware.api [None req-de571fec-bc51-44ba-92b4-20be3a98b5f3 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779951, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1749.301453] env[63379]: DEBUG nova.network.neutron [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Successfully updated port: 1ec6781c-1db3-427d-be1c-37534196f2f4 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1749.307365] env[63379]: DEBUG nova.policy [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '99f3906f7b7e47a1a81c5c8f38d5b4ea', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '645f0e0a5e1a44d59ca9c85da49bb454', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1749.311369] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4487d2ec-60aa-4558-ae80-2f0d7919e8ee tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Acquiring lock "refresh_cache-1d76a28f-822d-4b4f-be2f-2ad3371b3979" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1749.311565] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4487d2ec-60aa-4558-ae80-2f0d7919e8ee tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Acquired lock "refresh_cache-1d76a28f-822d-4b4f-be2f-2ad3371b3979" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1749.311744] env[63379]: DEBUG nova.network.neutron [None req-4487d2ec-60aa-4558-ae80-2f0d7919e8ee tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1749.311936] env[63379]: DEBUG nova.objects.instance [None req-4487d2ec-60aa-4558-ae80-2f0d7919e8ee tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lazy-loading 'info_cache' on Instance uuid 1d76a28f-822d-4b4f-be2f-2ad3371b3979 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1749.446939] env[63379]: DEBUG nova.network.neutron [None req-c0e7e230-1db8-4f77-83c1-9b2df8011afd tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Successfully updated port: d06b25f4-72d4-4f91-82df-8ff330c8ddc2 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1749.448302] env[63379]: INFO nova.compute.manager [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Took 26.00 seconds to build instance. [ 1749.466973] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779950, 'name': CreateVM_Task, 'duration_secs': 0.903371} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1749.468105] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1749.468105] env[63379]: DEBUG oslo_concurrency.lockutils [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1749.468105] env[63379]: DEBUG oslo_concurrency.lockutils [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1749.468368] env[63379]: DEBUG oslo_concurrency.lockutils [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1749.468636] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ccde4aa6-bf63-41b3-a043-72b40265d6cd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.474341] env[63379]: DEBUG oslo_vmware.api [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Waiting for the task: (returnval){ [ 1749.474341] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f45a48-8c51-55f6-f903-f8dc23f9af36" [ 1749.474341] env[63379]: _type = "Task" [ 1749.474341] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1749.485450] env[63379]: DEBUG nova.compute.manager [req-7010194e-0bc3-4258-9747-8440e9c5b5f9 req-833e6bdf-be1e-4a01-8327-0104e13e132e service nova] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Received event network-changed-3f904953-8743-428b-8084-4936ee47a1df {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1749.485612] env[63379]: DEBUG nova.compute.manager [req-7010194e-0bc3-4258-9747-8440e9c5b5f9 req-833e6bdf-be1e-4a01-8327-0104e13e132e service nova] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Refreshing instance network info cache due to event network-changed-3f904953-8743-428b-8084-4936ee47a1df. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1749.485850] env[63379]: DEBUG oslo_concurrency.lockutils [req-7010194e-0bc3-4258-9747-8440e9c5b5f9 req-833e6bdf-be1e-4a01-8327-0104e13e132e service nova] Acquiring lock "refresh_cache-3e875e92-673c-4cfa-86ce-fc270ae03e94" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1749.485962] env[63379]: DEBUG oslo_concurrency.lockutils [req-7010194e-0bc3-4258-9747-8440e9c5b5f9 req-833e6bdf-be1e-4a01-8327-0104e13e132e service nova] Acquired lock "refresh_cache-3e875e92-673c-4cfa-86ce-fc270ae03e94" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1749.486271] env[63379]: DEBUG nova.network.neutron [req-7010194e-0bc3-4258-9747-8440e9c5b5f9 req-833e6bdf-be1e-4a01-8327-0104e13e132e service nova] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Refreshing network info cache for port 3f904953-8743-428b-8084-4936ee47a1df {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1749.491255] env[63379]: DEBUG oslo_vmware.api [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f45a48-8c51-55f6-f903-f8dc23f9af36, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1749.554150] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d069c42-97dd-481b-8028-9971ad9329bc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.563222] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3593e848-3dfa-4c6d-9699-c32603ef1da1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.598539] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8184ae47-7e8c-44d4-a261-54b516a9bb86 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.604500] env[63379]: DEBUG oslo_vmware.api [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779952, 'name': Destroy_Task} progress is 33%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1749.610111] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b41c3836-efd8-42c1-aa76-71632f259993 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.624543] env[63379]: DEBUG nova.compute.provider_tree [None req-cad26875-63ff-4364-a68e-2cdd2ceb19dd tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1749.678714] env[63379]: DEBUG nova.network.neutron [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Successfully created port: d4e8381c-6eb1-4ebe-a6a3-b89ee2eb423e {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1749.744222] env[63379]: DEBUG nova.compute.manager [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1749.749258] env[63379]: DEBUG nova.compute.manager [req-8bedf5e3-f2de-4216-b864-701f0432454a req-f4d9827a-2641-45a2-8263-bc9bf34a1043 service nova] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Received event network-vif-plugged-d06b25f4-72d4-4f91-82df-8ff330c8ddc2 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1749.749479] env[63379]: DEBUG oslo_concurrency.lockutils [req-8bedf5e3-f2de-4216-b864-701f0432454a req-f4d9827a-2641-45a2-8263-bc9bf34a1043 service nova] Acquiring lock "f983d089-7cfc-46a5-8f8d-f49f67aef1da-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1749.749696] env[63379]: DEBUG oslo_concurrency.lockutils [req-8bedf5e3-f2de-4216-b864-701f0432454a req-f4d9827a-2641-45a2-8263-bc9bf34a1043 service nova] Lock "f983d089-7cfc-46a5-8f8d-f49f67aef1da-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1749.749871] env[63379]: DEBUG oslo_concurrency.lockutils [req-8bedf5e3-f2de-4216-b864-701f0432454a req-f4d9827a-2641-45a2-8263-bc9bf34a1043 service nova] Lock "f983d089-7cfc-46a5-8f8d-f49f67aef1da-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1749.750420] env[63379]: DEBUG nova.compute.manager [req-8bedf5e3-f2de-4216-b864-701f0432454a req-f4d9827a-2641-45a2-8263-bc9bf34a1043 service nova] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] No waiting events found dispatching network-vif-plugged-d06b25f4-72d4-4f91-82df-8ff330c8ddc2 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1749.750654] env[63379]: WARNING 
nova.compute.manager [req-8bedf5e3-f2de-4216-b864-701f0432454a req-f4d9827a-2641-45a2-8263-bc9bf34a1043 service nova] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Received unexpected event network-vif-plugged-d06b25f4-72d4-4f91-82df-8ff330c8ddc2 for instance with vm_state active and task_state None. [ 1749.785664] env[63379]: DEBUG oslo_vmware.api [None req-de571fec-bc51-44ba-92b4-20be3a98b5f3 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779951, 'name': ReconfigVM_Task, 'duration_secs': 0.699318} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1749.786564] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-de571fec-bc51-44ba-92b4-20be3a98b5f3 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Reconfigured VM instance instance-0000003e to detach disk 2002 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1749.786782] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-de571fec-bc51-44ba-92b4-20be3a98b5f3 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1749.787065] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-599079b6-da86-4f3c-9005-7b6d9bcf07f5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.793690] env[63379]: DEBUG oslo_vmware.api [None req-de571fec-bc51-44ba-92b4-20be3a98b5f3 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1749.793690] env[63379]: value = "task-1779953" [ 1749.793690] env[63379]: _type = "Task" [ 1749.793690] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1749.801735] env[63379]: DEBUG oslo_vmware.api [None req-de571fec-bc51-44ba-92b4-20be3a98b5f3 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779953, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1749.804328] env[63379]: DEBUG oslo_concurrency.lockutils [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Acquiring lock "refresh_cache-ebfe6204-c7d5-4e0c-bb63-74d5755552f6" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1749.804471] env[63379]: DEBUG oslo_concurrency.lockutils [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Acquired lock "refresh_cache-ebfe6204-c7d5-4e0c-bb63-74d5755552f6" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1749.804608] env[63379]: DEBUG nova.network.neutron [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1749.818220] env[63379]: DEBUG nova.objects.base [None req-4487d2ec-60aa-4558-ae80-2f0d7919e8ee tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Object Instance<1d76a28f-822d-4b4f-be2f-2ad3371b3979> lazy-loaded attributes: flavor,info_cache {{(pid=63379) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1749.950709] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c0e7e230-1db8-4f77-83c1-9b2df8011afd tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "refresh_cache-f983d089-7cfc-46a5-8f8d-f49f67aef1da" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1749.950927] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c0e7e230-1db8-4f77-83c1-9b2df8011afd tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquired lock "refresh_cache-f983d089-7cfc-46a5-8f8d-f49f67aef1da" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1749.951120] env[63379]: DEBUG nova.network.neutron [None req-c0e7e230-1db8-4f77-83c1-9b2df8011afd tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1749.952477] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c2d54044-e8b6-4853-8cfd-b9a5df604a4e tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "eda684fa-1595-4985-beb7-c298049411bf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.513s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1749.986684] env[63379]: DEBUG oslo_vmware.api [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f45a48-8c51-55f6-f903-f8dc23f9af36, 'name': 
SearchDatastore_Task, 'duration_secs': 0.023183} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1749.987044] env[63379]: DEBUG oslo_concurrency.lockutils [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1749.987304] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1749.987577] env[63379]: DEBUG oslo_concurrency.lockutils [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1749.987759] env[63379]: DEBUG oslo_concurrency.lockutils [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1749.987983] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1749.988283] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-10ef6742-7008-4456-ae20-d9e443f32a23 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.998372] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1749.998590] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1749.999396] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-196197a7-73f0-474f-b4ed-9f7a7dba27f6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.005529] env[63379]: DEBUG oslo_vmware.api [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Waiting for the task: (returnval){ [ 1750.005529] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f816e5-0d99-915d-9c36-a2db2f07c617" [ 1750.005529] env[63379]: _type = "Task" [ 1750.005529] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1750.014775] env[63379]: DEBUG oslo_vmware.api [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f816e5-0d99-915d-9c36-a2db2f07c617, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1750.081626] env[63379]: DEBUG oslo_vmware.api [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779952, 'name': Destroy_Task, 'duration_secs': 0.772126} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1750.081626] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Destroyed the VM [ 1750.081870] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Deleting Snapshot of the VM instance {{(pid=63379) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1750.082206] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-c7868c7c-1acb-418e-90a0-a4d73ad16300 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.091318] env[63379]: DEBUG oslo_vmware.api [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1750.091318] env[63379]: value = "task-1779954" [ 1750.091318] env[63379]: _type = "Task" [ 1750.091318] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1750.099800] env[63379]: DEBUG oslo_vmware.api [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779954, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1750.127785] env[63379]: DEBUG nova.scheduler.client.report [None req-cad26875-63ff-4364-a68e-2cdd2ceb19dd tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1750.199929] env[63379]: DEBUG nova.network.neutron [req-7010194e-0bc3-4258-9747-8440e9c5b5f9 req-833e6bdf-be1e-4a01-8327-0104e13e132e service nova] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Updated VIF entry in instance network info cache for port 3f904953-8743-428b-8084-4936ee47a1df. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1750.200363] env[63379]: DEBUG nova.network.neutron [req-7010194e-0bc3-4258-9747-8440e9c5b5f9 req-833e6bdf-be1e-4a01-8327-0104e13e132e service nova] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Updating instance_info_cache with network_info: [{"id": "3f904953-8743-428b-8084-4936ee47a1df", "address": "fa:16:3e:93:9d:90", "network": {"id": "b2ae020c-af72-45bd-92f6-89f9a2f1b430", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-536412963-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "02da0ce83a89450cb5011c53056cfa0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1880df72-582c-44cb-992d-88dc6a514914", "external-id": "nsx-vlan-transportzone-808", "segmentation_id": 808, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f904953-87", "ovs_interfaceid": "3f904953-8743-428b-8084-4936ee47a1df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1750.304527] env[63379]: DEBUG oslo_vmware.api [None req-de571fec-bc51-44ba-92b4-20be3a98b5f3 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779953, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1750.334713] env[63379]: DEBUG nova.network.neutron [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Instance cache missing network info. 
{{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1750.471398] env[63379]: DEBUG nova.network.neutron [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Updating instance_info_cache with network_info: [{"id": "1ec6781c-1db3-427d-be1c-37534196f2f4", "address": "fa:16:3e:43:ce:61", "network": {"id": "14089190-27d2-4297-9e9a-7e6780648a34", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1744878468-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2aec447aaec8409e9e3751d68c0106df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ec6781c-1d", "ovs_interfaceid": "1ec6781c-1db3-427d-be1c-37534196f2f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1750.517807] env[63379]: DEBUG oslo_vmware.api [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f816e5-0d99-915d-9c36-a2db2f07c617, 'name': SearchDatastore_Task, 'duration_secs': 0.01174} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1750.519319] env[63379]: WARNING nova.network.neutron [None req-c0e7e230-1db8-4f77-83c1-9b2df8011afd tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] 501025fb-aee7-4f74-80fd-af4976529317 already exists in list: networks containing: ['501025fb-aee7-4f74-80fd-af4976529317']. ignoring it [ 1750.521745] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c5c50b6-44a7-4408-bc50-4a13962ada05 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.528131] env[63379]: DEBUG oslo_vmware.api [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Waiting for the task: (returnval){ [ 1750.528131] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]520e6156-b6ee-f217-be26-d2f7a944f4c0" [ 1750.528131] env[63379]: _type = "Task" [ 1750.528131] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1750.535572] env[63379]: DEBUG oslo_vmware.api [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]520e6156-b6ee-f217-be26-d2f7a944f4c0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1750.601269] env[63379]: DEBUG oslo_vmware.api [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779954, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1750.633239] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cad26875-63ff-4364-a68e-2cdd2ceb19dd tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.901s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1750.635548] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 6.332s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1750.635785] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1750.636867] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63379) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1750.637218] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b182a9e0-a99c-43d1-9861-1d37ee6b3c4e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.645657] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecbc5400-7753-42eb-8714-31ce579da290 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.665739] env[63379]: INFO nova.scheduler.client.report [None req-cad26875-63ff-4364-a68e-2cdd2ceb19dd tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Deleted allocations for instance 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf [ 1750.667230] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5774d1f7-0e70-4382-b0ee-d71281a00e71 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.677013] env[63379]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87549fca-9d78-4030-9a2b-7d7c3a1f55b0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.706822] env[63379]: DEBUG oslo_concurrency.lockutils [req-7010194e-0bc3-4258-9747-8440e9c5b5f9 req-833e6bdf-be1e-4a01-8327-0104e13e132e service nova] Releasing lock "refresh_cache-3e875e92-673c-4cfa-86ce-fc270ae03e94" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1750.707315] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178433MB free_disk=162GB free_vcpus=48 pci_devices=None {{(pid=63379) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1750.707454] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1750.707649] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1750.740173] env[63379]: DEBUG nova.network.neutron [None req-4487d2ec-60aa-4558-ae80-2f0d7919e8ee tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Updating instance_info_cache with network_info: [{"id": "2ac41cb5-759a-42a6-a664-26ad0cc81d81", "address": "fa:16:3e:00:25:b1", "network": {"id": "a7b09ae6-790d-492f-a067-68a9ea22533a", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-776111847-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fceda42cf54845eab8068573e0f8eb26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ac41cb5-75", "ovs_interfaceid": "2ac41cb5-759a-42a6-a664-26ad0cc81d81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1750.757115] env[63379]: DEBUG nova.compute.manager [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1750.785010] env[63379]: DEBUG nova.virt.hardware [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1750.785284] env[63379]: DEBUG nova.virt.hardware [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1750.785444] env[63379]: DEBUG nova.virt.hardware [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1750.785629] env[63379]: DEBUG nova.virt.hardware [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1750.785780] env[63379]: DEBUG nova.virt.hardware [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1750.786153] env[63379]: DEBUG nova.virt.hardware [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1750.786414] env[63379]: DEBUG nova.virt.hardware [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1750.786725] env[63379]: DEBUG nova.virt.hardware [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1750.787096] env[63379]: DEBUG 
nova.virt.hardware [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1750.787342] env[63379]: DEBUG nova.virt.hardware [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1750.787553] env[63379]: DEBUG nova.virt.hardware [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1750.788701] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da383e74-fdf0-4b74-bdf5-bacf44ce1416 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.800775] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdcc9b41-9860-46b2-987f-c0e709137f19 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.820196] env[63379]: DEBUG oslo_vmware.api [None req-de571fec-bc51-44ba-92b4-20be3a98b5f3 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779953, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1750.976692] env[63379]: DEBUG oslo_concurrency.lockutils [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Releasing lock "refresh_cache-ebfe6204-c7d5-4e0c-bb63-74d5755552f6" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1750.977057] env[63379]: DEBUG nova.compute.manager [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Instance network_info: |[{"id": "1ec6781c-1db3-427d-be1c-37534196f2f4", "address": "fa:16:3e:43:ce:61", "network": {"id": "14089190-27d2-4297-9e9a-7e6780648a34", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1744878468-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2aec447aaec8409e9e3751d68c0106df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ec6781c-1d", "ovs_interfaceid": "1ec6781c-1db3-427d-be1c-37534196f2f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1750.977505] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:43:ce:61', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd0bdd11b-58af-4cc0-9d38-8322e1bb4e74', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1ec6781c-1db3-427d-be1c-37534196f2f4', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1750.988415] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Creating folder: Project (2aec447aaec8409e9e3751d68c0106df). Parent ref: group-v369214. 
{{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1750.988619] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c7a8fe6d-0f0b-49df-9ae3-29327e14acbb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.002143] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Created folder: Project (2aec447aaec8409e9e3751d68c0106df) in parent group-v369214. [ 1751.002339] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Creating folder: Instances. Parent ref: group-v369456. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1751.002807] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9f1fbe68-a223-4d25-aa2c-1989fe97ef86 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.008453] env[63379]: DEBUG nova.network.neutron [None req-c0e7e230-1db8-4f77-83c1-9b2df8011afd tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Updating instance_info_cache with network_info: [{"id": "3538ffcb-51cd-414b-ad0e-080a6e1ff138", "address": "fa:16:3e:6a:5e:54", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3538ffcb-51", "ovs_interfaceid": "3538ffcb-51cd-414b-ad0e-080a6e1ff138", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "d06b25f4-72d4-4f91-82df-8ff330c8ddc2", "address": "fa:16:3e:86:d8:5b", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": 
"l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd06b25f4-72", "ovs_interfaceid": "d06b25f4-72d4-4f91-82df-8ff330c8ddc2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1751.013742] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Created folder: Instances in parent group-v369456. [ 1751.014186] env[63379]: DEBUG oslo.service.loopingcall [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1751.014555] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1751.014883] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4ac2cd5b-adf2-47a4-8877-524e9c3b3839 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.043950] env[63379]: DEBUG oslo_vmware.api [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]520e6156-b6ee-f217-be26-d2f7a944f4c0, 'name': SearchDatastore_Task, 'duration_secs': 0.052097} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1751.045427] env[63379]: DEBUG oslo_concurrency.lockutils [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1751.045699] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 3e875e92-673c-4cfa-86ce-fc270ae03e94/3e875e92-673c-4cfa-86ce-fc270ae03e94.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1751.045942] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1751.045942] env[63379]: value = "task-1779957" [ 1751.045942] env[63379]: _type = "Task" [ 1751.045942] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1751.046156] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-383f08c9-f708-4cb5-a2ae-93dc1ddb690a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.058764] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779957, 'name': CreateVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.060479] env[63379]: DEBUG oslo_vmware.api [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Waiting for the task: (returnval){ [ 1751.060479] env[63379]: value = "task-1779958" [ 1751.060479] env[63379]: _type = "Task" [ 1751.060479] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1751.072019] env[63379]: DEBUG oslo_vmware.api [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Task: {'id': task-1779958, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.103136] env[63379]: DEBUG oslo_vmware.api [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779954, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.179607] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cad26875-63ff-4364-a68e-2cdd2ceb19dd tempest-ImagesOneServerNegativeTestJSON-1021560204 tempest-ImagesOneServerNegativeTestJSON-1021560204-project-member] Lock "6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.228s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1751.242780] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4487d2ec-60aa-4558-ae80-2f0d7919e8ee tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Releasing lock "refresh_cache-1d76a28f-822d-4b4f-be2f-2ad3371b3979" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1751.259349] env[63379]: DEBUG nova.network.neutron [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Successfully updated port: d4e8381c-6eb1-4ebe-a6a3-b89ee2eb423e {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1751.309277] env[63379]: DEBUG oslo_vmware.api [None req-de571fec-bc51-44ba-92b4-20be3a98b5f3 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1779953, 'name': PowerOnVM_Task, 'duration_secs': 1.416008} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1751.309277] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-de571fec-bc51-44ba-92b4-20be3a98b5f3 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1751.309277] env[63379]: DEBUG nova.compute.manager [None req-de571fec-bc51-44ba-92b4-20be3a98b5f3 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1751.309846] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-422f6658-1d53-413d-9944-7fe27633f6b6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.511920] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c0e7e230-1db8-4f77-83c1-9b2df8011afd tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Releasing lock "refresh_cache-f983d089-7cfc-46a5-8f8d-f49f67aef1da" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1751.512842] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c0e7e230-1db8-4f77-83c1-9b2df8011afd tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "f983d089-7cfc-46a5-8f8d-f49f67aef1da" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1751.513122] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c0e7e230-1db8-4f77-83c1-9b2df8011afd tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquired lock "f983d089-7cfc-46a5-8f8d-f49f67aef1da" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1751.514074] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d54b640-eb20-482c-b025-3abd91e3b74a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.536857] env[63379]: DEBUG nova.virt.hardware [None req-c0e7e230-1db8-4f77-83c1-9b2df8011afd tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1751.537133] env[63379]: DEBUG nova.virt.hardware [None req-c0e7e230-1db8-4f77-83c1-9b2df8011afd tempest-AttachInterfacesTestJSON-924600923 
tempest-AttachInterfacesTestJSON-924600923-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1751.537301] env[63379]: DEBUG nova.virt.hardware [None req-c0e7e230-1db8-4f77-83c1-9b2df8011afd tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1751.537520] env[63379]: DEBUG nova.virt.hardware [None req-c0e7e230-1db8-4f77-83c1-9b2df8011afd tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1751.537700] env[63379]: DEBUG nova.virt.hardware [None req-c0e7e230-1db8-4f77-83c1-9b2df8011afd tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1751.537859] env[63379]: DEBUG nova.virt.hardware [None req-c0e7e230-1db8-4f77-83c1-9b2df8011afd tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1751.538083] env[63379]: DEBUG nova.virt.hardware [None req-c0e7e230-1db8-4f77-83c1-9b2df8011afd tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1751.538253] env[63379]: DEBUG nova.virt.hardware [None req-c0e7e230-1db8-4f77-83c1-9b2df8011afd tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1751.538422] env[63379]: DEBUG nova.virt.hardware [None req-c0e7e230-1db8-4f77-83c1-9b2df8011afd tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1751.538591] env[63379]: DEBUG nova.virt.hardware [None req-c0e7e230-1db8-4f77-83c1-9b2df8011afd tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1751.538773] env[63379]: DEBUG nova.virt.hardware [None req-c0e7e230-1db8-4f77-83c1-9b2df8011afd tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1751.546132] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c0e7e230-1db8-4f77-83c1-9b2df8011afd tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Reconfiguring VM to attach interface {{(pid=63379) 
attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1751.546585] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-04410fdd-4338-4b55-b959-70a43bf935be {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.566781] env[63379]: DEBUG oslo_vmware.api [None req-c0e7e230-1db8-4f77-83c1-9b2df8011afd tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1751.566781] env[63379]: value = "task-1779959" [ 1751.566781] env[63379]: _type = "Task" [ 1751.566781] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1751.572277] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779957, 'name': CreateVM_Task, 'duration_secs': 0.490868} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1751.572509] env[63379]: DEBUG oslo_vmware.api [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Task: {'id': task-1779958, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.575146] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1751.575792] env[63379]: DEBUG oslo_concurrency.lockutils [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1751.575959] env[63379]: DEBUG oslo_concurrency.lockutils [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1751.576296] env[63379]: DEBUG oslo_concurrency.lockutils [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1751.576868] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc20c868-27c6-41e0-abf7-c6f86520ca1a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.581565] env[63379]: DEBUG oslo_vmware.api [None req-c0e7e230-1db8-4f77-83c1-9b2df8011afd tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1779959, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.585343] env[63379]: DEBUG oslo_vmware.api [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Waiting for the task: (returnval){ [ 1751.585343] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5283c5e9-f9bc-3be5-80dc-ca70bcc01ee6" [ 1751.585343] env[63379]: _type = "Task" [ 1751.585343] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1751.594886] env[63379]: DEBUG oslo_vmware.api [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5283c5e9-f9bc-3be5-80dc-ca70bcc01ee6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.603967] env[63379]: DEBUG oslo_vmware.api [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779954, 'name': RemoveSnapshot_Task, 'duration_secs': 1.133957} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1751.604206] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Deleted Snapshot of the VM instance {{(pid=63379) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1751.605027] env[63379]: DEBUG nova.compute.manager [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1751.605242] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0422477a-f1eb-4b94-82ed-458b86de1065 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.746777] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 90f0c97d-695b-4975-8ab9-4e77a9175da1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1751.746958] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 1d2de9da-9dfe-42d2-b206-bb5139b1970b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1751.747100] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1751.747223] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 1d76a28f-822d-4b4f-be2f-2ad3371b3979 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1751.747339] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance f983d089-7cfc-46a5-8f8d-f49f67aef1da actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1751.747455] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 38be0e8d-188b-4a98-aedc-5d941b63c000 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1751.748026] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 19a41941-0679-4971-8a44-c95b13f5c294 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1751.748026] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 510db409-0b4c-494a-8084-39ef3cd6c918 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1751.748026] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance a7cce485-7476-4ea1-b127-68d879e164cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1751.748026] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 19941838-d6b0-4fb8-9d06-f4a1b80ba428 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1751.748026] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 7edacb20-8472-4e9d-9408-31947d9f284e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1751.748291] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance c900bb90-b4a8-40a2-9436-5a0ced1dd919 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1751.748291] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance b9bc2562-9475-400e-9cf9-646b8f4c8cf2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1751.748458] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 8877e0f7-091b-4a91-bb5c-fb7733e5f70c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1751.748458] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance eda684fa-1595-4985-beb7-c298049411bf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1751.748536] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 3e875e92-673c-4cfa-86ce-fc270ae03e94 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1751.748650] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance ebfe6204-c7d5-4e0c-bb63-74d5755552f6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1751.748755] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 4b419aa8-d4da-45fd-a6da-6f05ee851f2f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1751.748974] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Total usable vcpus: 48, total allocated vcpus: 18 {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1751.749126] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4032MB phys_disk=200GB used_disk=19GB total_vcpus=48 used_vcpus=18 pci_stats=[] {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1751.752518] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-4487d2ec-60aa-4558-ae80-2f0d7919e8ee tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1751.753044] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ece43e2f-51d3-4e12-a10f-afb0e6db98ff {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.760012] env[63379]: DEBUG oslo_vmware.api [None req-4487d2ec-60aa-4558-ae80-2f0d7919e8ee tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Waiting for the task: (returnval){ [ 1751.760012] env[63379]: value = "task-1779960" [ 1751.760012] env[63379]: _type = "Task" [ 1751.760012] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1751.764397] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "refresh_cache-4b419aa8-d4da-45fd-a6da-6f05ee851f2f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1751.764397] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquired lock "refresh_cache-4b419aa8-d4da-45fd-a6da-6f05ee851f2f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1751.764397] env[63379]: DEBUG nova.network.neutron [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1751.770240] env[63379]: DEBUG oslo_vmware.api [None req-4487d2ec-60aa-4558-ae80-2f0d7919e8ee tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1779960, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.806939] env[63379]: DEBUG nova.compute.manager [req-bb1a9aba-b2d0-4059-8506-674523b6bae0 req-6a3b3b92-f22d-4a97-9592-623089cb77d3 service nova] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Received event network-vif-plugged-1ec6781c-1db3-427d-be1c-37534196f2f4 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1751.807154] env[63379]: DEBUG oslo_concurrency.lockutils [req-bb1a9aba-b2d0-4059-8506-674523b6bae0 req-6a3b3b92-f22d-4a97-9592-623089cb77d3 service nova] Acquiring lock "ebfe6204-c7d5-4e0c-bb63-74d5755552f6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1751.807324] env[63379]: DEBUG oslo_concurrency.lockutils [req-bb1a9aba-b2d0-4059-8506-674523b6bae0 req-6a3b3b92-f22d-4a97-9592-623089cb77d3 service nova] Lock "ebfe6204-c7d5-4e0c-bb63-74d5755552f6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1751.807929] env[63379]: DEBUG oslo_concurrency.lockutils [req-bb1a9aba-b2d0-4059-8506-674523b6bae0 req-6a3b3b92-f22d-4a97-9592-623089cb77d3 service nova] Lock "ebfe6204-c7d5-4e0c-bb63-74d5755552f6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1751.807929] env[63379]: DEBUG nova.compute.manager [req-bb1a9aba-b2d0-4059-8506-674523b6bae0 req-6a3b3b92-f22d-4a97-9592-623089cb77d3 service nova] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] No waiting events found dispatching network-vif-plugged-1ec6781c-1db3-427d-be1c-37534196f2f4 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1751.807929] env[63379]: WARNING nova.compute.manager [req-bb1a9aba-b2d0-4059-8506-674523b6bae0 req-6a3b3b92-f22d-4a97-9592-623089cb77d3 service nova] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Received unexpected event network-vif-plugged-1ec6781c-1db3-427d-be1c-37534196f2f4 for instance with vm_state building and task_state spawning. [ 1751.808204] env[63379]: DEBUG nova.compute.manager [req-bb1a9aba-b2d0-4059-8506-674523b6bae0 req-6a3b3b92-f22d-4a97-9592-623089cb77d3 service nova] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Received event network-changed-1ec6781c-1db3-427d-be1c-37534196f2f4 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1751.808260] env[63379]: DEBUG nova.compute.manager [req-bb1a9aba-b2d0-4059-8506-674523b6bae0 req-6a3b3b92-f22d-4a97-9592-623089cb77d3 service nova] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Refreshing instance network info cache due to event network-changed-1ec6781c-1db3-427d-be1c-37534196f2f4. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1751.808452] env[63379]: DEBUG oslo_concurrency.lockutils [req-bb1a9aba-b2d0-4059-8506-674523b6bae0 req-6a3b3b92-f22d-4a97-9592-623089cb77d3 service nova] Acquiring lock "refresh_cache-ebfe6204-c7d5-4e0c-bb63-74d5755552f6" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1751.808591] env[63379]: DEBUG oslo_concurrency.lockutils [req-bb1a9aba-b2d0-4059-8506-674523b6bae0 req-6a3b3b92-f22d-4a97-9592-623089cb77d3 service nova] Acquired lock "refresh_cache-ebfe6204-c7d5-4e0c-bb63-74d5755552f6" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1751.808752] env[63379]: DEBUG nova.network.neutron [req-bb1a9aba-b2d0-4059-8506-674523b6bae0 req-6a3b3b92-f22d-4a97-9592-623089cb77d3 service nova] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Refreshing network info cache for port 1ec6781c-1db3-427d-be1c-37534196f2f4 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1752.004781] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-097cbb79-61b1-4c56-aa5f-09bdf98620a7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.013282] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29400325-d3df-4cfa-a4b4-0c73c2476a7a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.044387] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05332fdf-bc5e-423c-92c6-d1b03de3f0ba {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.052820] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de2fb9f9-f1d1-4580-b75f-265bb4b25b4d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.069048] env[63379]: DEBUG nova.compute.provider_tree [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1752.085265] env[63379]: DEBUG oslo_vmware.api [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Task: {'id': task-1779958, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.52901} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1752.088932] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 3e875e92-673c-4cfa-86ce-fc270ae03e94/3e875e92-673c-4cfa-86ce-fc270ae03e94.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1752.089293] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1752.090325] env[63379]: DEBUG oslo_vmware.api [None req-c0e7e230-1db8-4f77-83c1-9b2df8011afd tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1779959, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.090592] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-70f7d3ac-48c3-4979-86f6-4b62ac29e1c9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.102830] env[63379]: DEBUG oslo_vmware.api [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5283c5e9-f9bc-3be5-80dc-ca70bcc01ee6, 'name': SearchDatastore_Task, 'duration_secs': 0.011922} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1752.104114] env[63379]: DEBUG oslo_concurrency.lockutils [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1752.104739] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1752.104739] env[63379]: DEBUG oslo_concurrency.lockutils [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1752.104863] env[63379]: DEBUG oslo_concurrency.lockutils [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1752.105140] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1752.105903] env[63379]: DEBUG oslo_vmware.api [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Waiting for the task: (returnval){ [ 1752.105903] env[63379]: value = "task-1779961" [ 1752.105903] env[63379]: _type = "Task" [ 1752.105903] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1752.106094] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b11cba40-c15a-488b-b48c-348552a61076 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.119168] env[63379]: DEBUG oslo_vmware.api [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Task: {'id': task-1779961, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.120689] env[63379]: INFO nova.compute.manager [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Shelve offloading [ 1752.123094] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1752.123279] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1752.125408] env[63379]: DEBUG nova.compute.manager [req-56af320f-34e3-4c53-aab5-96e794f84a66 req-2fa0dbc9-17d1-4578-914b-2fa3af9828d0 service nova] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Received event network-changed-d06b25f4-72d4-4f91-82df-8ff330c8ddc2 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1752.125643] env[63379]: DEBUG nova.compute.manager [req-56af320f-34e3-4c53-aab5-96e794f84a66 req-2fa0dbc9-17d1-4578-914b-2fa3af9828d0 service nova] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Refreshing instance network info cache due to event network-changed-d06b25f4-72d4-4f91-82df-8ff330c8ddc2. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1752.125901] env[63379]: DEBUG oslo_concurrency.lockutils [req-56af320f-34e3-4c53-aab5-96e794f84a66 req-2fa0dbc9-17d1-4578-914b-2fa3af9828d0 service nova] Acquiring lock "refresh_cache-f983d089-7cfc-46a5-8f8d-f49f67aef1da" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1752.126123] env[63379]: DEBUG oslo_concurrency.lockutils [req-56af320f-34e3-4c53-aab5-96e794f84a66 req-2fa0dbc9-17d1-4578-914b-2fa3af9828d0 service nova] Acquired lock "refresh_cache-f983d089-7cfc-46a5-8f8d-f49f67aef1da" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1752.126328] env[63379]: DEBUG nova.network.neutron [req-56af320f-34e3-4c53-aab5-96e794f84a66 req-2fa0dbc9-17d1-4578-914b-2fa3af9828d0 service nova] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Refreshing network info cache for port d06b25f4-72d4-4f91-82df-8ff330c8ddc2 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1752.127713] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1752.128954] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7f7d082-9e0b-4996-9123-1724c859805c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.131369] env[63379]: DEBUG oslo_vmware.service [-] 
Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-16049cea-f2c0-4afb-bcf4-2b2b2579a1c1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.136990] env[63379]: DEBUG oslo_vmware.api [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Waiting for the task: (returnval){ [ 1752.136990] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52cced8e-e43e-febb-0c18-e27a4f59b8c5" [ 1752.136990] env[63379]: _type = "Task" [ 1752.136990] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1752.142240] env[63379]: DEBUG oslo_vmware.api [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1752.142240] env[63379]: value = "task-1779962" [ 1752.142240] env[63379]: _type = "Task" [ 1752.142240] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1752.150665] env[63379]: DEBUG oslo_vmware.api [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52cced8e-e43e-febb-0c18-e27a4f59b8c5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.156548] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] VM already powered off {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1752.156859] env[63379]: DEBUG nova.compute.manager [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1752.158071] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53922a51-a027-47e1-a6c4-4c3e68a5a62a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.164509] env[63379]: DEBUG oslo_concurrency.lockutils [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "refresh_cache-7edacb20-8472-4e9d-9408-31947d9f284e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1752.164720] env[63379]: DEBUG oslo_concurrency.lockutils [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquired lock "refresh_cache-7edacb20-8472-4e9d-9408-31947d9f284e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1752.164944] env[63379]: DEBUG nova.network.neutron [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 
tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1752.271337] env[63379]: DEBUG oslo_vmware.api [None req-4487d2ec-60aa-4558-ae80-2f0d7919e8ee tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1779960, 'name': PowerOnVM_Task, 'duration_secs': 0.45569} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1752.271783] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-4487d2ec-60aa-4558-ae80-2f0d7919e8ee tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1752.271874] env[63379]: DEBUG nova.compute.manager [None req-4487d2ec-60aa-4558-ae80-2f0d7919e8ee tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1752.272980] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-231f52a7-7c8e-4483-b365-97d3e0027f06 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.301418] env[63379]: DEBUG nova.network.neutron [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Instance cache missing network info. 
{{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1752.452415] env[63379]: DEBUG nova.network.neutron [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Updating instance_info_cache with network_info: [{"id": "d4e8381c-6eb1-4ebe-a6a3-b89ee2eb423e", "address": "fa:16:3e:f4:8d:6f", "network": {"id": "0dd98be0-5b25-4e45-ac38-4b8d3cd9fc6c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-191573180-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "645f0e0a5e1a44d59ca9c85da49bb454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd4e8381c-6e", "ovs_interfaceid": "d4e8381c-6eb1-4ebe-a6a3-b89ee2eb423e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1752.576952] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1752.581084] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6fd21a44-3e31-450f-888b-853d3aa2a846 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Acquiring lock "c900bb90-b4a8-40a2-9436-5a0ced1dd919" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1752.583025] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6fd21a44-3e31-450f-888b-853d3aa2a846 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Lock "c900bb90-b4a8-40a2-9436-5a0ced1dd919" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1752.583025] env[63379]: DEBUG nova.compute.manager [None req-6fd21a44-3e31-450f-888b-853d3aa2a846 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Checking state {{(pid=63379) _get_power_state 
/opt/stack/nova/nova/compute/manager.py:1792}} [ 1752.585858] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2e29f3a-1bf6-4c81-97bd-a96670b56225 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.595304] env[63379]: DEBUG oslo_vmware.api [None req-c0e7e230-1db8-4f77-83c1-9b2df8011afd tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1779959, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.597318] env[63379]: DEBUG nova.compute.manager [None req-6fd21a44-3e31-450f-888b-853d3aa2a846 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63379) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1752.599351] env[63379]: DEBUG nova.objects.instance [None req-6fd21a44-3e31-450f-888b-853d3aa2a846 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Lazy-loading 'flavor' on Instance uuid c900bb90-b4a8-40a2-9436-5a0ced1dd919 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1752.618657] env[63379]: DEBUG oslo_vmware.api [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Task: {'id': task-1779961, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07212} completed successfully. 
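The do_stop_instance records just above follow the standard oslo.concurrency pattern: a lock named after the instance UUID is taken before the power-state check and released once the nested helper returns. A minimal sketch of that pattern using the public lockutils context manager (the body is a placeholder, not Nova's actual code):

    # In-process lock keyed on the instance UUID, mirroring the
    # "Acquiring lock ... by ... do_stop_instance" / "acquired" pair above.
    from oslo_concurrency import lockutils

    instance_uuid = "c900bb90-b4a8-40a2-9436-5a0ced1dd919"  # from the log above

    with lockutils.lock(instance_uuid):
        # the power-state check and PowerOffVM_Task call would run here
        pass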
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1752.618657] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1752.619445] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5884516c-bf4f-4f69-9d35-793e860edc9a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.643227] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Reconfiguring VM instance instance-00000056 to attach disk [datastore1] 3e875e92-673c-4cfa-86ce-fc270ae03e94/3e875e92-673c-4cfa-86ce-fc270ae03e94.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1752.644573] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f1491697-86f5-463b-8035-0cf7eceb4aa5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.672104] env[63379]: DEBUG oslo_vmware.api [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52cced8e-e43e-febb-0c18-e27a4f59b8c5, 'name': SearchDatastore_Task, 'duration_secs': 0.016824} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1752.674027] env[63379]: DEBUG nova.network.neutron [req-bb1a9aba-b2d0-4059-8506-674523b6bae0 req-6a3b3b92-f22d-4a97-9592-623089cb77d3 service nova] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Updated VIF entry in instance network info cache for port 1ec6781c-1db3-427d-be1c-37534196f2f4. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1752.675736] env[63379]: DEBUG nova.network.neutron [req-bb1a9aba-b2d0-4059-8506-674523b6bae0 req-6a3b3b92-f22d-4a97-9592-623089cb77d3 service nova] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Updating instance_info_cache with network_info: [{"id": "1ec6781c-1db3-427d-be1c-37534196f2f4", "address": "fa:16:3e:43:ce:61", "network": {"id": "14089190-27d2-4297-9e9a-7e6780648a34", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1744878468-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2aec447aaec8409e9e3751d68c0106df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d0bdd11b-58af-4cc0-9d38-8322e1bb4e74", "external-id": "nsx-vlan-transportzone-398", "segmentation_id": 398, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ec6781c-1d", "ovs_interfaceid": "1ec6781c-1db3-427d-be1c-37534196f2f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1752.676591] env[63379]: DEBUG oslo_vmware.api [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Waiting for the task: (returnval){ [ 1752.676591] env[63379]: value = "task-1779963" [ 1752.676591] env[63379]: _type = "Task" [ 1752.676591] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1752.677014] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-189d33b4-d475-4538-91a0-e70f9580c938 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.686329] env[63379]: DEBUG oslo_vmware.api [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Waiting for the task: (returnval){ [ 1752.686329] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]527a16c8-dca6-e816-fb7b-719d0fc3d647" [ 1752.686329] env[63379]: _type = "Task" [ 1752.686329] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1752.690135] env[63379]: DEBUG oslo_vmware.api [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Task: {'id': task-1779963, 'name': ReconfigVM_Task} progress is 14%. 
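The instance_info_cache updates above carry network_info as a list of VIF dicts (id, address, network.subnets[].ips[], details, devname, and so on). A small reader's helper, illustrative only and keyed to the field names visible in this excerpt, that pulls the fixed IPs per VIF out of such a structure:

    def fixed_ips(network_info):
        # Map VIF id -> list of fixed IP addresses, per the logged layout.
        result = {}
        for vif in network_info:
            addresses = [ip["address"]
                         for subnet in vif["network"]["subnets"]
                         for ip in subnet["ips"]
                         if ip["type"] == "fixed"]
            result[vif["id"]] = addresses
        return result

    sample = [{"id": "1ec6781c-1db3-427d-be1c-37534196f2f4",
               "network": {"subnets": [{"ips": [{"address": "192.168.128.14",
                                                 "type": "fixed"}]}]}}]
    print(fixed_ips(sample))  # {'1ec6781c-1db3-427d-be1c-37534196f2f4': ['192.168.128.14']}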
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.699832] env[63379]: DEBUG oslo_vmware.api [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]527a16c8-dca6-e816-fb7b-719d0fc3d647, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.895152] env[63379]: DEBUG nova.compute.manager [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Stashing vm_state: active {{(pid=63379) _prep_resize /opt/stack/nova/nova/compute/manager.py:5671}} [ 1752.955257] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Releasing lock "refresh_cache-4b419aa8-d4da-45fd-a6da-6f05ee851f2f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1752.955667] env[63379]: DEBUG nova.compute.manager [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Instance network_info: |[{"id": "d4e8381c-6eb1-4ebe-a6a3-b89ee2eb423e", "address": "fa:16:3e:f4:8d:6f", "network": {"id": "0dd98be0-5b25-4e45-ac38-4b8d3cd9fc6c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-191573180-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "645f0e0a5e1a44d59ca9c85da49bb454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd4e8381c-6e", "ovs_interfaceid": "d4e8381c-6eb1-4ebe-a6a3-b89ee2eb423e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1752.956237] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f4:8d:6f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd4e8381c-6eb1-4ebe-a6a3-b89ee2eb423e', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1752.968273] env[63379]: DEBUG nova.virt.vmwareapi.vm_util 
[None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Creating folder: Project (645f0e0a5e1a44d59ca9c85da49bb454). Parent ref: group-v369214. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1752.969703] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8b26e344-7bbf-4e85-81ab-343c324af389 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.982251] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Created folder: Project (645f0e0a5e1a44d59ca9c85da49bb454) in parent group-v369214. [ 1752.982480] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Creating folder: Instances. Parent ref: group-v369459. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1752.982853] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2aeaac06-90a8-46ea-b644-882c98b06ee4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.993220] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Created folder: Instances in parent group-v369459. [ 1752.993500] env[63379]: DEBUG oslo.service.loopingcall [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1752.993709] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1752.993933] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b3a81557-4658-4665-b012-e5b2b640fdc1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.012399] env[63379]: DEBUG nova.network.neutron [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Updating instance_info_cache with network_info: [{"id": "1c98bd39-d74e-43ba-9a95-dcbdb4d073ab", "address": "fa:16:3e:8b:77:3f", "network": {"id": "f43cdd88-dc3a-4cc6-af5d-da244f472d78", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-715557899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "928a9d102f0e45b897eae72fa566c0fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23fc30ea-1f06-424d-86e1-27ae5435b1a9", "external-id": "nsx-vlan-transportzone-189", "segmentation_id": 189, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c98bd39-d7", "ovs_interfaceid": "1c98bd39-d74e-43ba-9a95-dcbdb4d073ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1753.019083] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1753.019083] env[63379]: value = "task-1779966" [ 1753.019083] env[63379]: _type = "Task" [ 1753.019083] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1753.027558] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779966, 'name': CreateVM_Task} progress is 0%. 
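The oslo.service.loopingcall record above ("Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return") is the looping-call machinery blocking on a wrapped function. Below is a minimal FixedIntervalLoopingCall example of that general polling pattern; it is not necessarily the exact wrapper used at loopingcall.py:435:

    from oslo_service import loopingcall

    state = {"polls": 0}

    def _poll():
        # Stand-in for "has the task reached a terminal state yet?"
        state["polls"] += 1
        if state["polls"] >= 3:
            raise loopingcall.LoopingCallDone(retvalue="done")

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    result = timer.start(interval=0.1).wait()  # blocks until LoopingCallDone
    print(result)  # done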
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.089973] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63379) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1753.090300] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.383s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1753.099011] env[63379]: DEBUG oslo_vmware.api [None req-c0e7e230-1db8-4f77-83c1-9b2df8011afd tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1779959, 'name': ReconfigVM_Task, 'duration_secs': 1.320982} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1753.099776] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c0e7e230-1db8-4f77-83c1-9b2df8011afd tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Releasing lock "f983d089-7cfc-46a5-8f8d-f49f67aef1da" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1753.100213] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c0e7e230-1db8-4f77-83c1-9b2df8011afd tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Reconfigured VM to attach interface {{(pid=63379) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1753.114395] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fd21a44-3e31-450f-888b-853d3aa2a846 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1753.114699] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-41c65053-d6c6-4ca0-b320-c5b0b7f98364 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.122634] env[63379]: DEBUG oslo_vmware.api [None req-6fd21a44-3e31-450f-888b-853d3aa2a846 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Waiting for the task: (returnval){ [ 1753.122634] env[63379]: value = "task-1779967" [ 1753.122634] env[63379]: _type = "Task" [ 1753.122634] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1753.130946] env[63379]: DEBUG oslo_vmware.api [None req-6fd21a44-3e31-450f-888b-853d3aa2a846 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779967, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.177843] env[63379]: DEBUG oslo_concurrency.lockutils [req-bb1a9aba-b2d0-4059-8506-674523b6bae0 req-6a3b3b92-f22d-4a97-9592-623089cb77d3 service nova] Releasing lock "refresh_cache-ebfe6204-c7d5-4e0c-bb63-74d5755552f6" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1753.194145] env[63379]: DEBUG oslo_vmware.api [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Task: {'id': task-1779963, 'name': ReconfigVM_Task, 'duration_secs': 0.358412} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1753.195146] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Reconfigured VM instance instance-00000056 to attach disk [datastore1] 3e875e92-673c-4cfa-86ce-fc270ae03e94/3e875e92-673c-4cfa-86ce-fc270ae03e94.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1753.195834] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b0966db3-b48e-4853-b9ff-88dfad6ce430 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.203190] env[63379]: DEBUG oslo_vmware.api [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]527a16c8-dca6-e816-fb7b-719d0fc3d647, 'name': SearchDatastore_Task, 'duration_secs': 0.012474} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1753.204518] env[63379]: DEBUG oslo_concurrency.lockutils [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1753.204794] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] ebfe6204-c7d5-4e0c-bb63-74d5755552f6/ebfe6204-c7d5-4e0c-bb63-74d5755552f6.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1753.205144] env[63379]: DEBUG oslo_vmware.api [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Waiting for the task: (returnval){ [ 1753.205144] env[63379]: value = "task-1779968" [ 1753.205144] env[63379]: _type = "Task" [ 1753.205144] env[63379]: } to complete. 
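The CopyVirtualDisk_Task records here are the driver copying the cached image VMDK from devstack-image-cache_base into the instance directory and then polling the task. Issued against oslo.vmware directly, that flow is an invoke_api call followed by wait_for_task; the sketch below is a rough illustration only, with placeholder credentials and datastore paths rather than values from this deployment:

    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        "vcenter.example.org", "user", "secret",    # placeholders
        api_retry_count=10, task_poll_interval=0.5)

    vim = session.vim
    task = session.invoke_api(
        vim, "CopyVirtualDisk_Task",
        vim.service_content.virtualDiskManager,
        sourceName="[datastore1] devstack-image-cache_base/IMAGE/IMAGE.vmdk",
        destName="[datastore1] INSTANCE/INSTANCE.vmdk")
    session.wait_for_task(task)  # blocks, raising if vCenter reports an error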
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1753.205358] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ce9e1b25-5e17-4aa3-a9d4-03670a43b3d5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.215889] env[63379]: DEBUG oslo_vmware.api [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Task: {'id': task-1779968, 'name': Rename_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.217265] env[63379]: DEBUG oslo_vmware.api [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Waiting for the task: (returnval){ [ 1753.217265] env[63379]: value = "task-1779969" [ 1753.217265] env[63379]: _type = "Task" [ 1753.217265] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1753.227663] env[63379]: DEBUG oslo_vmware.api [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Task: {'id': task-1779969, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.365933] env[63379]: DEBUG nova.network.neutron [req-56af320f-34e3-4c53-aab5-96e794f84a66 req-2fa0dbc9-17d1-4578-914b-2fa3af9828d0 service nova] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Updated VIF entry in instance network info cache for port d06b25f4-72d4-4f91-82df-8ff330c8ddc2. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1753.366590] env[63379]: DEBUG nova.network.neutron [req-56af320f-34e3-4c53-aab5-96e794f84a66 req-2fa0dbc9-17d1-4578-914b-2fa3af9828d0 service nova] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Updating instance_info_cache with network_info: [{"id": "3538ffcb-51cd-414b-ad0e-080a6e1ff138", "address": "fa:16:3e:6a:5e:54", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3538ffcb-51", "ovs_interfaceid": "3538ffcb-51cd-414b-ad0e-080a6e1ff138", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "d06b25f4-72d4-4f91-82df-8ff330c8ddc2", "address": "fa:16:3e:86:d8:5b", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd06b25f4-72", "ovs_interfaceid": "d06b25f4-72d4-4f91-82df-8ff330c8ddc2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1753.428338] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1753.428713] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1753.515222] env[63379]: DEBUG oslo_concurrency.lockutils [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Releasing lock "refresh_cache-7edacb20-8472-4e9d-9408-31947d9f284e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1753.532952] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1779966, 'name': CreateVM_Task, 'duration_secs': 0.374799} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1753.533716] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1753.534250] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1753.534470] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1753.534913] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1753.535254] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c0916f7-aebd-45cd-806a-b3a816ea5358 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.542569] env[63379]: DEBUG oslo_vmware.api [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 1753.542569] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5299ea53-8fb6-fc5c-0810-0a693bba0b7f" [ 1753.542569] env[63379]: _type = "Task" [ 1753.542569] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1753.552107] env[63379]: DEBUG oslo_vmware.api [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5299ea53-8fb6-fc5c-0810-0a693bba0b7f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.607912] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c0e7e230-1db8-4f77-83c1-9b2df8011afd tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "interface-f983d089-7cfc-46a5-8f8d-f49f67aef1da-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.410s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1753.643382] env[63379]: DEBUG oslo_vmware.api [None req-6fd21a44-3e31-450f-888b-853d3aa2a846 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779967, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.718148] env[63379]: DEBUG oslo_vmware.api [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Task: {'id': task-1779968, 'name': Rename_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.726110] env[63379]: DEBUG oslo_vmware.api [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Task: {'id': task-1779969, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.854736] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1753.856110] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abbe9409-ac70-492d-a589-2e5178641dd7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.866452] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1753.866817] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dc177042-a036-4d40-bb65-fe26064cf4ad {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.869452] env[63379]: DEBUG oslo_concurrency.lockutils [req-56af320f-34e3-4c53-aab5-96e794f84a66 req-2fa0dbc9-17d1-4578-914b-2fa3af9828d0 service nova] Releasing lock "refresh_cache-f983d089-7cfc-46a5-8f8d-f49f67aef1da" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1753.869770] env[63379]: DEBUG nova.compute.manager [req-56af320f-34e3-4c53-aab5-96e794f84a66 req-2fa0dbc9-17d1-4578-914b-2fa3af9828d0 service nova] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Received event network-vif-plugged-d4e8381c-6eb1-4ebe-a6a3-b89ee2eb423e {{(pid=63379) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1753.870359] env[63379]: DEBUG oslo_concurrency.lockutils [req-56af320f-34e3-4c53-aab5-96e794f84a66 req-2fa0dbc9-17d1-4578-914b-2fa3af9828d0 service nova] Acquiring lock "4b419aa8-d4da-45fd-a6da-6f05ee851f2f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1753.870359] env[63379]: DEBUG oslo_concurrency.lockutils [req-56af320f-34e3-4c53-aab5-96e794f84a66 req-2fa0dbc9-17d1-4578-914b-2fa3af9828d0 service nova] Lock "4b419aa8-d4da-45fd-a6da-6f05ee851f2f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1753.870616] env[63379]: DEBUG oslo_concurrency.lockutils [req-56af320f-34e3-4c53-aab5-96e794f84a66 req-2fa0dbc9-17d1-4578-914b-2fa3af9828d0 service nova] Lock "4b419aa8-d4da-45fd-a6da-6f05ee851f2f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1753.870821] env[63379]: DEBUG nova.compute.manager [req-56af320f-34e3-4c53-aab5-96e794f84a66 req-2fa0dbc9-17d1-4578-914b-2fa3af9828d0 service nova] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] No waiting events found dispatching network-vif-plugged-d4e8381c-6eb1-4ebe-a6a3-b89ee2eb423e {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1753.871081] env[63379]: WARNING nova.compute.manager [req-56af320f-34e3-4c53-aab5-96e794f84a66 req-2fa0dbc9-17d1-4578-914b-2fa3af9828d0 service nova] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Received unexpected event network-vif-plugged-d4e8381c-6eb1-4ebe-a6a3-b89ee2eb423e for instance with vm_state building and task_state spawning. [ 1753.871323] env[63379]: DEBUG nova.compute.manager [req-56af320f-34e3-4c53-aab5-96e794f84a66 req-2fa0dbc9-17d1-4578-914b-2fa3af9828d0 service nova] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Received event network-changed-d4e8381c-6eb1-4ebe-a6a3-b89ee2eb423e {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1753.871546] env[63379]: DEBUG nova.compute.manager [req-56af320f-34e3-4c53-aab5-96e794f84a66 req-2fa0dbc9-17d1-4578-914b-2fa3af9828d0 service nova] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Refreshing instance network info cache due to event network-changed-d4e8381c-6eb1-4ebe-a6a3-b89ee2eb423e. 
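The lockutils records in this stretch also report contention data directly ("waited 0.000s", "held 7.410s", "held 0.000s"), which is useful when hunting slow or contended locks. An illustrative helper for extracting those timings from an excerpt like this one:

    import re

    # Matches e.g.: Lock "compute_resources" "released" by "..." :: held 2.383s
    LOCK_RE = re.compile(r'Lock "([^"]+)".*?::\s+(held|waited)\s+([0-9.]+)s')

    def lock_timings(log_text):
        return [(name, kind, float(secs))
                for name, kind, secs in LOCK_RE.findall(log_text)]

    sample = ('Lock "compute_resources" "released" by '
              '"ResourceTracker._update_available_resource" :: held 2.383s')
    print(lock_timings(sample))  # [('compute_resources', 'held', 2.383)]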
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1753.871808] env[63379]: DEBUG oslo_concurrency.lockutils [req-56af320f-34e3-4c53-aab5-96e794f84a66 req-2fa0dbc9-17d1-4578-914b-2fa3af9828d0 service nova] Acquiring lock "refresh_cache-4b419aa8-d4da-45fd-a6da-6f05ee851f2f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1753.872016] env[63379]: DEBUG oslo_concurrency.lockutils [req-56af320f-34e3-4c53-aab5-96e794f84a66 req-2fa0dbc9-17d1-4578-914b-2fa3af9828d0 service nova] Acquired lock "refresh_cache-4b419aa8-d4da-45fd-a6da-6f05ee851f2f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1753.872275] env[63379]: DEBUG nova.network.neutron [req-56af320f-34e3-4c53-aab5-96e794f84a66 req-2fa0dbc9-17d1-4578-914b-2fa3af9828d0 service nova] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Refreshing network info cache for port d4e8381c-6eb1-4ebe-a6a3-b89ee2eb423e {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1753.937295] env[63379]: INFO nova.compute.claims [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1754.055147] env[63379]: DEBUG oslo_vmware.api [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5299ea53-8fb6-fc5c-0810-0a693bba0b7f, 'name': SearchDatastore_Task, 'duration_secs': 0.009865} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1754.056788] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1754.057113] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1754.057372] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1754.057534] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1754.057780] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1754.058126] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1754.058336] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1754.058540] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Deleting the datastore file [datastore1] 7edacb20-8472-4e9d-9408-31947d9f284e {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1754.058801] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-710200e3-e2ea-4244-8c85-6512d15edec7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.060840] env[63379]: 
DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a1b813f9-71d9-46d0-b2e2-c5734615bd8b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.067630] env[63379]: DEBUG oslo_vmware.api [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1754.067630] env[63379]: value = "task-1779971" [ 1754.067630] env[63379]: _type = "Task" [ 1754.067630] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1754.069814] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1754.070050] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1754.074076] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f03c0ef6-3e9b-4df8-a676-a1c928b3a3c6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.079702] env[63379]: DEBUG oslo_vmware.api [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 1754.079702] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5253574c-311b-20aa-ab92-170b091bf92d" [ 1754.079702] env[63379]: _type = "Task" [ 1754.079702] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1754.083059] env[63379]: DEBUG oslo_vmware.api [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779971, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.092883] env[63379]: DEBUG oslo_vmware.api [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5253574c-311b-20aa-ab92-170b091bf92d, 'name': SearchDatastore_Task, 'duration_secs': 0.009562} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1754.093777] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a791f76-b47d-4492-b0b9-529f9368b26b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.100150] env[63379]: DEBUG oslo_vmware.api [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 1754.100150] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d8a018-478f-2ac3-4436-6be67ec3e1be" [ 1754.100150] env[63379]: _type = "Task" [ 1754.100150] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1754.108347] env[63379]: DEBUG oslo_vmware.api [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d8a018-478f-2ac3-4436-6be67ec3e1be, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.134024] env[63379]: DEBUG oslo_vmware.api [None req-6fd21a44-3e31-450f-888b-853d3aa2a846 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779967, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.220103] env[63379]: DEBUG oslo_vmware.api [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Task: {'id': task-1779968, 'name': Rename_Task, 'duration_secs': 0.954938} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1754.224031] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1754.224328] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8ecb8257-5933-4582-8727-8ede51ef54d6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.233056] env[63379]: DEBUG oslo_vmware.api [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Task: {'id': task-1779969, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.234715] env[63379]: DEBUG oslo_vmware.api [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Waiting for the task: (returnval){ [ 1754.234715] env[63379]: value = "task-1779972" [ 1754.234715] env[63379]: _type = "Task" [ 1754.234715] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1754.247392] env[63379]: DEBUG oslo_vmware.api [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Task: {'id': task-1779972, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.389833] env[63379]: DEBUG nova.compute.manager [req-421f9c0c-47b0-4320-943d-aba9b402f81e req-161950ac-be38-4156-b590-61c06ca7baf3 service nova] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Received event network-changed-2d279162-72d1-4378-b83d-c80b2815f680 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1754.389833] env[63379]: DEBUG nova.compute.manager [req-421f9c0c-47b0-4320-943d-aba9b402f81e req-161950ac-be38-4156-b590-61c06ca7baf3 service nova] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Refreshing instance network info cache due to event network-changed-2d279162-72d1-4378-b83d-c80b2815f680. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1754.389833] env[63379]: DEBUG oslo_concurrency.lockutils [req-421f9c0c-47b0-4320-943d-aba9b402f81e req-161950ac-be38-4156-b590-61c06ca7baf3 service nova] Acquiring lock "refresh_cache-19a41941-0679-4971-8a44-c95b13f5c294" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1754.389833] env[63379]: DEBUG oslo_concurrency.lockutils [req-421f9c0c-47b0-4320-943d-aba9b402f81e req-161950ac-be38-4156-b590-61c06ca7baf3 service nova] Acquired lock "refresh_cache-19a41941-0679-4971-8a44-c95b13f5c294" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1754.390318] env[63379]: DEBUG nova.network.neutron [req-421f9c0c-47b0-4320-943d-aba9b402f81e req-161950ac-be38-4156-b590-61c06ca7baf3 service nova] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Refreshing network info cache for port 2d279162-72d1-4378-b83d-c80b2815f680 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1754.447246] env[63379]: INFO nova.compute.resource_tracker [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Updating resource usage from migration 26f6c1b9-5396-465b-824d-55a899bd9ddd [ 1754.483318] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d91f1be4-fcf6-4325-913b-63dc162cb408 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Acquiring lock "38be0e8d-188b-4a98-aedc-5d941b63c000" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1754.483606] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d91f1be4-fcf6-4325-913b-63dc162cb408 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Lock "38be0e8d-188b-4a98-aedc-5d941b63c000" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1754.483870] env[63379]: DEBUG 
oslo_concurrency.lockutils [None req-d91f1be4-fcf6-4325-913b-63dc162cb408 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Acquiring lock "38be0e8d-188b-4a98-aedc-5d941b63c000-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1754.484092] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d91f1be4-fcf6-4325-913b-63dc162cb408 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Lock "38be0e8d-188b-4a98-aedc-5d941b63c000-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1754.484289] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d91f1be4-fcf6-4325-913b-63dc162cb408 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Lock "38be0e8d-188b-4a98-aedc-5d941b63c000-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1754.489361] env[63379]: INFO nova.compute.manager [None req-d91f1be4-fcf6-4325-913b-63dc162cb408 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Terminating instance [ 1754.498767] env[63379]: DEBUG nova.compute.manager [None req-d91f1be4-fcf6-4325-913b-63dc162cb408 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1754.499096] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-d91f1be4-fcf6-4325-913b-63dc162cb408 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1754.500151] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95943abd-896b-4d62-ae3c-ba03c342dfd5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.512464] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-d91f1be4-fcf6-4325-913b-63dc162cb408 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1754.512464] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7623c9b4-f03c-4122-bdd0-7c7a4e0cc4b7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.522011] env[63379]: DEBUG oslo_vmware.api [None req-d91f1be4-fcf6-4325-913b-63dc162cb408 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Waiting for the task: (returnval){ [ 1754.522011] env[63379]: value = "task-1779973" [ 1754.522011] env[63379]: _type = "Task" [ 1754.522011] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1754.533760] env[63379]: DEBUG oslo_vmware.api [None req-d91f1be4-fcf6-4325-913b-63dc162cb408 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': task-1779973, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.583784] env[63379]: DEBUG oslo_vmware.api [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1779971, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.479494} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1754.584129] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1754.584465] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1754.584632] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1754.613524] env[63379]: DEBUG oslo_vmware.api [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d8a018-478f-2ac3-4436-6be67ec3e1be, 'name': SearchDatastore_Task, 'duration_secs': 0.076504} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1754.614745] env[63379]: INFO nova.scheduler.client.report [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Deleted allocations for instance 7edacb20-8472-4e9d-9408-31947d9f284e [ 1754.617647] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1754.617772] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 4b419aa8-d4da-45fd-a6da-6f05ee851f2f/4b419aa8-d4da-45fd-a6da-6f05ee851f2f.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1754.620541] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-084ed1b2-7e29-4fe3-82b5-46ada975046d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.628827] env[63379]: DEBUG oslo_vmware.api [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 1754.628827] env[63379]: value = "task-1779974" [ 1754.628827] env[63379]: _type = "Task" [ 1754.628827] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1754.635908] env[63379]: DEBUG oslo_vmware.api [None req-6fd21a44-3e31-450f-888b-853d3aa2a846 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779967, 'name': PowerOffVM_Task, 'duration_secs': 1.210156} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1754.636235] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fd21a44-3e31-450f-888b-853d3aa2a846 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1754.636479] env[63379]: DEBUG nova.compute.manager [None req-6fd21a44-3e31-450f-888b-853d3aa2a846 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1754.637293] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cef0cd61-be94-46ee-8940-27eeb9f949f2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.647512] env[63379]: DEBUG oslo_vmware.api [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1779974, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.735393] env[63379]: DEBUG oslo_vmware.api [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Task: {'id': task-1779969, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.238044} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1754.738983] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] ebfe6204-c7d5-4e0c-bb63-74d5755552f6/ebfe6204-c7d5-4e0c-bb63-74d5755552f6.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1754.739439] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1754.739739] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6711d5bc-b5ed-4c79-8f75-b817ee41070f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.750224] env[63379]: DEBUG oslo_vmware.api [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Task: {'id': task-1779972, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.751746] env[63379]: DEBUG oslo_vmware.api [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Waiting for the task: (returnval){ [ 1754.751746] env[63379]: value = "task-1779975" [ 1754.751746] env[63379]: _type = "Task" [ 1754.751746] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1754.764518] env[63379]: DEBUG oslo_vmware.api [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Task: {'id': task-1779975, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.780317] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27597dd8-e0a9-4c81-abf7-9fca13a6f6cc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.788415] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd2ed993-81f9-41ac-bf55-84b0c5efca22 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.829999] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c867d2e-d927-4414-bfb8-f186a68743b3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.840489] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17d16f90-49b8-4d2c-866b-3e9a88f19a59 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.857667] env[63379]: DEBUG nova.compute.provider_tree [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1754.922879] env[63379]: DEBUG nova.network.neutron [req-56af320f-34e3-4c53-aab5-96e794f84a66 req-2fa0dbc9-17d1-4578-914b-2fa3af9828d0 service nova] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Updated VIF entry in instance network info cache for port d4e8381c-6eb1-4ebe-a6a3-b89ee2eb423e. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1754.922879] env[63379]: DEBUG nova.network.neutron [req-56af320f-34e3-4c53-aab5-96e794f84a66 req-2fa0dbc9-17d1-4578-914b-2fa3af9828d0 service nova] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Updating instance_info_cache with network_info: [{"id": "d4e8381c-6eb1-4ebe-a6a3-b89ee2eb423e", "address": "fa:16:3e:f4:8d:6f", "network": {"id": "0dd98be0-5b25-4e45-ac38-4b8d3cd9fc6c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-191573180-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "645f0e0a5e1a44d59ca9c85da49bb454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd4e8381c-6e", "ovs_interfaceid": "d4e8381c-6eb1-4ebe-a6a3-b89ee2eb423e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1755.035053] env[63379]: DEBUG oslo_vmware.api [None req-d91f1be4-fcf6-4325-913b-63dc162cb408 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': task-1779973, 'name': PowerOffVM_Task, 'duration_secs': 0.306759} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1755.035053] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-d91f1be4-fcf6-4325-913b-63dc162cb408 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1755.035284] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-d91f1be4-fcf6-4325-913b-63dc162cb408 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1755.035474] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a2dd5c3b-efee-4beb-9066-15815674f817 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.119801] env[63379]: DEBUG oslo_concurrency.lockutils [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1755.143887] env[63379]: DEBUG oslo_vmware.api [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1779974, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.160212] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6fd21a44-3e31-450f-888b-853d3aa2a846 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Lock "c900bb90-b4a8-40a2-9436-5a0ced1dd919" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.578s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1755.265945] env[63379]: DEBUG oslo_vmware.api [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Task: {'id': task-1779972, 'name': PowerOnVM_Task, 'duration_secs': 0.957668} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1755.268599] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1755.268599] env[63379]: INFO nova.compute.manager [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Took 9.40 seconds to spawn the instance on the hypervisor. 
[ 1755.268599] env[63379]: DEBUG nova.compute.manager [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1755.271386] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d143edba-31a7-407e-be77-5a68a653d5bd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.281445] env[63379]: DEBUG oslo_vmware.api [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Task: {'id': task-1779975, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.175254} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1755.282947] env[63379]: DEBUG nova.network.neutron [req-421f9c0c-47b0-4320-943d-aba9b402f81e req-161950ac-be38-4156-b590-61c06ca7baf3 service nova] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Updated VIF entry in instance network info cache for port 2d279162-72d1-4378-b83d-c80b2815f680. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1755.283495] env[63379]: DEBUG nova.network.neutron [req-421f9c0c-47b0-4320-943d-aba9b402f81e req-161950ac-be38-4156-b590-61c06ca7baf3 service nova] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Updating instance_info_cache with network_info: [{"id": "2d279162-72d1-4378-b83d-c80b2815f680", "address": "fa:16:3e:8c:45:f4", "network": {"id": "3a5c4f8e-5c7c-4623-90f8-f1b83e5b35f8", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-709139332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce15a519ec5744feb0731439b2534fc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d279162-72", "ovs_interfaceid": "2d279162-72d1-4378-b83d-c80b2815f680", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1755.286744] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1755.287464] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-d91f1be4-fcf6-4325-913b-63dc162cb408 tempest-ServersWithSpecificFlavorTestJSON-765004270 
tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1755.287692] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-d91f1be4-fcf6-4325-913b-63dc162cb408 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1755.287903] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-d91f1be4-fcf6-4325-913b-63dc162cb408 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Deleting the datastore file [datastore1] 38be0e8d-188b-4a98-aedc-5d941b63c000 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1755.291771] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-328ba934-d5fa-4d66-8f8f-8c1715e2b2df {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.292485] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-756d4c46-1ea3-4083-a5f8-db5b20d6748b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.305768] env[63379]: DEBUG oslo_vmware.api [None req-d91f1be4-fcf6-4325-913b-63dc162cb408 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Waiting for the task: (returnval){ [ 1755.305768] env[63379]: value = "task-1779977" [ 1755.305768] env[63379]: _type = "Task" [ 1755.305768] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1755.327759] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] ebfe6204-c7d5-4e0c-bb63-74d5755552f6/ebfe6204-c7d5-4e0c-bb63-74d5755552f6.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1755.332204] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b113203f-cc7d-42b7-9c30-87c41b8318a8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.354958] env[63379]: DEBUG oslo_vmware.api [None req-d91f1be4-fcf6-4325-913b-63dc162cb408 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': task-1779977, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.355936] env[63379]: DEBUG oslo_vmware.api [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Waiting for the task: (returnval){ [ 1755.355936] env[63379]: value = "task-1779978" [ 1755.355936] env[63379]: _type = "Task" [ 1755.355936] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1755.360142] env[63379]: DEBUG nova.scheduler.client.report [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1755.365365] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f8e94cb8-6da7-4d1e-a236-53e59318213c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "interface-f983d089-7cfc-46a5-8f8d-f49f67aef1da-d06b25f4-72d4-4f91-82df-8ff330c8ddc2" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1755.365605] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f8e94cb8-6da7-4d1e-a236-53e59318213c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "interface-f983d089-7cfc-46a5-8f8d-f49f67aef1da-d06b25f4-72d4-4f91-82df-8ff330c8ddc2" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1755.370581] env[63379]: DEBUG oslo_vmware.api [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Task: {'id': task-1779978, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.425381] env[63379]: DEBUG oslo_concurrency.lockutils [req-56af320f-34e3-4c53-aab5-96e794f84a66 req-2fa0dbc9-17d1-4578-914b-2fa3af9828d0 service nova] Releasing lock "refresh_cache-4b419aa8-d4da-45fd-a6da-6f05ee851f2f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1755.640976] env[63379]: DEBUG oslo_vmware.api [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1779974, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.758587} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1755.641310] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 4b419aa8-d4da-45fd-a6da-6f05ee851f2f/4b419aa8-d4da-45fd-a6da-6f05ee851f2f.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1755.641536] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1755.641796] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-137bbffc-0ae7-4a7b-a545-b9f264219c1a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.649579] env[63379]: DEBUG oslo_vmware.api [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 1755.649579] env[63379]: value = "task-1779979" [ 1755.649579] env[63379]: _type = "Task" [ 1755.649579] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1755.657862] env[63379]: DEBUG oslo_vmware.api [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1779979, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.751284] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c9e21fe1-4256-4b66-9b51-cfaeaf8624d2 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "7edacb20-8472-4e9d-9408-31947d9f284e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1755.792026] env[63379]: DEBUG oslo_concurrency.lockutils [req-421f9c0c-47b0-4320-943d-aba9b402f81e req-161950ac-be38-4156-b590-61c06ca7baf3 service nova] Releasing lock "refresh_cache-19a41941-0679-4971-8a44-c95b13f5c294" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1755.792026] env[63379]: DEBUG nova.compute.manager [req-421f9c0c-47b0-4320-943d-aba9b402f81e req-161950ac-be38-4156-b590-61c06ca7baf3 service nova] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Received event network-changed-2d279162-72d1-4378-b83d-c80b2815f680 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1755.792026] env[63379]: DEBUG nova.compute.manager [req-421f9c0c-47b0-4320-943d-aba9b402f81e req-161950ac-be38-4156-b590-61c06ca7baf3 service nova] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Refreshing instance network info cache due to event network-changed-2d279162-72d1-4378-b83d-c80b2815f680. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1755.792026] env[63379]: DEBUG oslo_concurrency.lockutils [req-421f9c0c-47b0-4320-943d-aba9b402f81e req-161950ac-be38-4156-b590-61c06ca7baf3 service nova] Acquiring lock "refresh_cache-19a41941-0679-4971-8a44-c95b13f5c294" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1755.792026] env[63379]: DEBUG oslo_concurrency.lockutils [req-421f9c0c-47b0-4320-943d-aba9b402f81e req-161950ac-be38-4156-b590-61c06ca7baf3 service nova] Acquired lock "refresh_cache-19a41941-0679-4971-8a44-c95b13f5c294" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1755.792026] env[63379]: DEBUG nova.network.neutron [req-421f9c0c-47b0-4320-943d-aba9b402f81e req-161950ac-be38-4156-b590-61c06ca7baf3 service nova] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Refreshing network info cache for port 2d279162-72d1-4378-b83d-c80b2815f680 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1755.808733] env[63379]: INFO nova.compute.manager [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Took 30.04 seconds to build instance. [ 1755.828259] env[63379]: DEBUG oslo_vmware.api [None req-d91f1be4-fcf6-4325-913b-63dc162cb408 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': task-1779977, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.867102] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.438s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1755.867278] env[63379]: INFO nova.compute.manager [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Migrating [ 1755.877600] env[63379]: DEBUG oslo_vmware.api [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Task: {'id': task-1779978, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.879059] env[63379]: DEBUG oslo_concurrency.lockutils [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.759s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1755.879346] env[63379]: DEBUG nova.objects.instance [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lazy-loading 'resources' on Instance uuid 7edacb20-8472-4e9d-9408-31947d9f284e {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1755.880860] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f8e94cb8-6da7-4d1e-a236-53e59318213c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "f983d089-7cfc-46a5-8f8d-f49f67aef1da" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1755.881041] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f8e94cb8-6da7-4d1e-a236-53e59318213c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquired lock "f983d089-7cfc-46a5-8f8d-f49f67aef1da" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1755.888137] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c6945bc-e70a-4cef-84ae-6a014ecd13e9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.910655] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0acb8b1-9809-4656-94fe-24fffd0ecd44 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.942020] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f8e94cb8-6da7-4d1e-a236-53e59318213c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Reconfiguring VM to detach interface {{(pid=63379) 
detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1755.942190] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b33367f2-1f5b-4044-aed3-d6d33e3119da {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.961894] env[63379]: DEBUG oslo_vmware.api [None req-f8e94cb8-6da7-4d1e-a236-53e59318213c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1755.961894] env[63379]: value = "task-1779980" [ 1755.961894] env[63379]: _type = "Task" [ 1755.961894] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1755.970541] env[63379]: DEBUG oslo_vmware.api [None req-f8e94cb8-6da7-4d1e-a236-53e59318213c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1779980, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.161059] env[63379]: DEBUG oslo_vmware.api [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1779979, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065005} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1756.161363] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1756.162194] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f08cf93f-0ea0-4ea9-baa5-c26e0a264191 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.185688] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] 4b419aa8-d4da-45fd-a6da-6f05ee851f2f/4b419aa8-d4da-45fd-a6da-6f05ee851f2f.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1756.186481] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e879da14-ca0b-4a6f-8446-263b8c9acc0b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.207643] env[63379]: DEBUG oslo_vmware.api [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 1756.207643] env[63379]: value = "task-1779981" [ 1756.207643] env[63379]: _type = "Task" [ 1756.207643] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1756.215913] env[63379]: DEBUG oslo_vmware.api [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1779981, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.311338] env[63379]: DEBUG oslo_concurrency.lockutils [None req-70104fa6-db97-4e5b-964f-33f5dd3eed7b tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Lock "3e875e92-673c-4cfa-86ce-fc270ae03e94" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.557s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1756.326887] env[63379]: DEBUG oslo_vmware.api [None req-d91f1be4-fcf6-4325-913b-63dc162cb408 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Task: {'id': task-1779977, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.737348} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1756.327211] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-d91f1be4-fcf6-4325-913b-63dc162cb408 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1756.327406] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-d91f1be4-fcf6-4325-913b-63dc162cb408 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1756.327601] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-d91f1be4-fcf6-4325-913b-63dc162cb408 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1756.327776] env[63379]: INFO nova.compute.manager [None req-d91f1be4-fcf6-4325-913b-63dc162cb408 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Took 1.83 seconds to destroy the instance on the hypervisor. [ 1756.328029] env[63379]: DEBUG oslo.service.loopingcall [None req-d91f1be4-fcf6-4325-913b-63dc162cb408 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1756.328234] env[63379]: DEBUG nova.compute.manager [-] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1756.328331] env[63379]: DEBUG nova.network.neutron [-] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1756.369358] env[63379]: DEBUG oslo_vmware.api [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Task: {'id': task-1779978, 'name': ReconfigVM_Task, 'duration_secs': 0.933419} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1756.370047] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Reconfigured VM instance instance-00000057 to attach disk [datastore1] ebfe6204-c7d5-4e0c-bb63-74d5755552f6/ebfe6204-c7d5-4e0c-bb63-74d5755552f6.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1756.371119] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6967dc41-2aa4-4ec2-afae-05d5bf32f7cd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.378406] env[63379]: DEBUG oslo_vmware.api [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Waiting for the task: (returnval){ [ 1756.378406] env[63379]: value = "task-1779982" [ 1756.378406] env[63379]: _type = "Task" [ 1756.378406] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1756.383874] env[63379]: DEBUG nova.objects.instance [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lazy-loading 'numa_topology' on Instance uuid 7edacb20-8472-4e9d-9408-31947d9f284e {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1756.391356] env[63379]: DEBUG oslo_vmware.api [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Task: {'id': task-1779982, 'name': Rename_Task} progress is 6%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.394043] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "refresh_cache-eda684fa-1595-4985-beb7-c298049411bf" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1756.394179] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquired lock "refresh_cache-eda684fa-1595-4985-beb7-c298049411bf" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1756.394362] env[63379]: DEBUG nova.network.neutron [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1756.400303] env[63379]: DEBUG nova.objects.instance [None req-373921e5-3060-4b02-b08b-a4a21428eb96 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Lazy-loading 'flavor' on Instance uuid c900bb90-b4a8-40a2-9436-5a0ced1dd919 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1756.473350] env[63379]: DEBUG oslo_vmware.api [None req-f8e94cb8-6da7-4d1e-a236-53e59318213c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1779980, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.723033] env[63379]: DEBUG oslo_vmware.api [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1779981, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.889861] env[63379]: DEBUG nova.objects.base [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Object Instance<7edacb20-8472-4e9d-9408-31947d9f284e> lazy-loaded attributes: resources,numa_topology {{(pid=63379) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1756.892219] env[63379]: DEBUG oslo_vmware.api [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Task: {'id': task-1779982, 'name': Rename_Task, 'duration_secs': 0.260824} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1756.895386] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1756.895386] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dc20279f-8a2a-4de8-8019-a62a4db2ecd4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.905268] env[63379]: DEBUG oslo_vmware.api [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Waiting for the task: (returnval){ [ 1756.905268] env[63379]: value = "task-1779983" [ 1756.905268] env[63379]: _type = "Task" [ 1756.905268] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1756.905641] env[63379]: DEBUG oslo_concurrency.lockutils [None req-373921e5-3060-4b02-b08b-a4a21428eb96 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Acquiring lock "refresh_cache-c900bb90-b4a8-40a2-9436-5a0ced1dd919" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1756.907890] env[63379]: DEBUG oslo_concurrency.lockutils [None req-373921e5-3060-4b02-b08b-a4a21428eb96 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Acquired lock "refresh_cache-c900bb90-b4a8-40a2-9436-5a0ced1dd919" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1756.907890] env[63379]: DEBUG nova.network.neutron [None req-373921e5-3060-4b02-b08b-a4a21428eb96 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1756.907890] env[63379]: DEBUG nova.objects.instance [None req-373921e5-3060-4b02-b08b-a4a21428eb96 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Lazy-loading 'info_cache' on Instance uuid c900bb90-b4a8-40a2-9436-5a0ced1dd919 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1756.919653] env[63379]: DEBUG oslo_vmware.api [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Task: {'id': task-1779983, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.976081] env[63379]: DEBUG oslo_vmware.api [None req-f8e94cb8-6da7-4d1e-a236-53e59318213c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1779980, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.009814] env[63379]: DEBUG nova.network.neutron [req-421f9c0c-47b0-4320-943d-aba9b402f81e req-161950ac-be38-4156-b590-61c06ca7baf3 service nova] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Updated VIF entry in instance network info cache for port 2d279162-72d1-4378-b83d-c80b2815f680. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1757.009814] env[63379]: DEBUG nova.network.neutron [req-421f9c0c-47b0-4320-943d-aba9b402f81e req-161950ac-be38-4156-b590-61c06ca7baf3 service nova] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Updating instance_info_cache with network_info: [{"id": "2d279162-72d1-4378-b83d-c80b2815f680", "address": "fa:16:3e:8c:45:f4", "network": {"id": "3a5c4f8e-5c7c-4623-90f8-f1b83e5b35f8", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-709139332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce15a519ec5744feb0731439b2534fc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "822050c7-1845-485d-b87e-73778d21c33c", "external-id": "nsx-vlan-transportzone-701", "segmentation_id": 701, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d279162-72", "ovs_interfaceid": "2d279162-72d1-4378-b83d-c80b2815f680", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1757.048200] env[63379]: DEBUG nova.compute.manager [req-8a73ace7-6fac-4143-aa8d-0684362ccd0c req-c0760458-d5c5-49d8-9fb0-aee0eb3ee09c service nova] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Received event network-vif-deleted-a8926575-6550-43c6-b23d-a15787ee76c0 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1757.048338] env[63379]: INFO nova.compute.manager [req-8a73ace7-6fac-4143-aa8d-0684362ccd0c req-c0760458-d5c5-49d8-9fb0-aee0eb3ee09c service nova] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Neutron deleted interface a8926575-6550-43c6-b23d-a15787ee76c0; detaching it from the instance and deleting it from the info cache [ 1757.048510] env[63379]: DEBUG nova.network.neutron [req-8a73ace7-6fac-4143-aa8d-0684362ccd0c req-c0760458-d5c5-49d8-9fb0-aee0eb3ee09c service nova] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1757.209124] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0712c44-27f6-46d0-a472-ab1ef74f3147 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.226076] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-d91f12cf-eecd-4f22-bdb9-24618067987c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.228535] env[63379]: DEBUG oslo_vmware.api [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1779981, 'name': ReconfigVM_Task, 'duration_secs': 0.831665} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1757.228998] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Reconfigured VM instance instance-00000058 to attach disk [datastore1] 4b419aa8-d4da-45fd-a6da-6f05ee851f2f/4b419aa8-d4da-45fd-a6da-6f05ee851f2f.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1757.230340] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ca8373ab-fab1-4bf2-a57e-826d68c1e0a6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.265870] env[63379]: DEBUG nova.network.neutron [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Updating instance_info_cache with network_info: [{"id": "82d4cc07-9772-4f7e-87ba-1ef653e88fd3", "address": "fa:16:3e:63:27:ae", "network": {"id": "a2c9b802-041e-4679-bfb1-118fd9cd10f3", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-986609966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f28f4532d464e6eb90ab75799990c85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82d4cc07-97", "ovs_interfaceid": "82d4cc07-9772-4f7e-87ba-1ef653e88fd3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1757.272316] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae58e14e-a8c2-4e33-ad72-e86f1dc91e07 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.275726] env[63379]: DEBUG oslo_vmware.api [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 1757.275726] env[63379]: value = "task-1779984" [ 1757.275726] env[63379]: _type = "Task" [ 1757.275726] 
env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1757.285119] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dcf1716-4c79-477f-97ea-d21005614d1b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.293081] env[63379]: DEBUG oslo_vmware.api [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1779984, 'name': Rename_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.303266] env[63379]: DEBUG oslo_concurrency.lockutils [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Acquiring lock "3e875e92-673c-4cfa-86ce-fc270ae03e94" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1757.303679] env[63379]: DEBUG oslo_concurrency.lockutils [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Lock "3e875e92-673c-4cfa-86ce-fc270ae03e94" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1757.303779] env[63379]: DEBUG oslo_concurrency.lockutils [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Acquiring lock "3e875e92-673c-4cfa-86ce-fc270ae03e94-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1757.303899] env[63379]: DEBUG oslo_concurrency.lockutils [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Lock "3e875e92-673c-4cfa-86ce-fc270ae03e94-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1757.304086] env[63379]: DEBUG oslo_concurrency.lockutils [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Lock "3e875e92-673c-4cfa-86ce-fc270ae03e94-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1757.306606] env[63379]: DEBUG nova.compute.provider_tree [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1757.308122] env[63379]: INFO nova.compute.manager [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 
tempest-ServerTagsTestJSON-785899892-project-member] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Terminating instance [ 1757.310980] env[63379]: DEBUG nova.compute.manager [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1757.311425] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1757.312087] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77104fb5-33cb-436a-8b49-4574535d5220 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.321234] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1757.322290] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0371aba6-e6f4-4e30-b96d-46d0415f7798 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.330426] env[63379]: DEBUG oslo_vmware.api [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Waiting for the task: (returnval){ [ 1757.330426] env[63379]: value = "task-1779985" [ 1757.330426] env[63379]: _type = "Task" [ 1757.330426] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1757.339998] env[63379]: DEBUG oslo_vmware.api [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Task: {'id': task-1779985, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.412983] env[63379]: DEBUG nova.objects.base [None req-373921e5-3060-4b02-b08b-a4a21428eb96 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=63379) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1757.421930] env[63379]: DEBUG oslo_vmware.api [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Task: {'id': task-1779983, 'name': PowerOnVM_Task, 'duration_secs': 0.467976} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1757.423085] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1757.423489] env[63379]: INFO nova.compute.manager [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Took 9.11 seconds to spawn the instance on the hypervisor. [ 1757.423839] env[63379]: DEBUG nova.compute.manager [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1757.425357] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11961f3f-3e29-4c84-872f-cdd16ae1556c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.473977] env[63379]: DEBUG oslo_vmware.api [None req-f8e94cb8-6da7-4d1e-a236-53e59318213c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1779980, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.507399] env[63379]: DEBUG nova.network.neutron [-] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1757.511896] env[63379]: DEBUG oslo_concurrency.lockutils [req-421f9c0c-47b0-4320-943d-aba9b402f81e req-161950ac-be38-4156-b590-61c06ca7baf3 service nova] Releasing lock "refresh_cache-19a41941-0679-4971-8a44-c95b13f5c294" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1757.512302] env[63379]: DEBUG nova.compute.manager [req-421f9c0c-47b0-4320-943d-aba9b402f81e req-161950ac-be38-4156-b590-61c06ca7baf3 service nova] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Received event network-vif-unplugged-1c98bd39-d74e-43ba-9a95-dcbdb4d073ab {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1757.512506] env[63379]: DEBUG oslo_concurrency.lockutils [req-421f9c0c-47b0-4320-943d-aba9b402f81e req-161950ac-be38-4156-b590-61c06ca7baf3 service nova] Acquiring lock "7edacb20-8472-4e9d-9408-31947d9f284e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1757.512920] env[63379]: DEBUG oslo_concurrency.lockutils [req-421f9c0c-47b0-4320-943d-aba9b402f81e req-161950ac-be38-4156-b590-61c06ca7baf3 service nova] Lock "7edacb20-8472-4e9d-9408-31947d9f284e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1757.513153] env[63379]: DEBUG oslo_concurrency.lockutils [req-421f9c0c-47b0-4320-943d-aba9b402f81e req-161950ac-be38-4156-b590-61c06ca7baf3 service nova] Lock "7edacb20-8472-4e9d-9408-31947d9f284e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1757.513399] env[63379]: DEBUG nova.compute.manager [req-421f9c0c-47b0-4320-943d-aba9b402f81e req-161950ac-be38-4156-b590-61c06ca7baf3 service nova] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] No waiting events found dispatching network-vif-unplugged-1c98bd39-d74e-43ba-9a95-dcbdb4d073ab {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1757.513575] env[63379]: WARNING nova.compute.manager [req-421f9c0c-47b0-4320-943d-aba9b402f81e req-161950ac-be38-4156-b590-61c06ca7baf3 service nova] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Received unexpected event network-vif-unplugged-1c98bd39-d74e-43ba-9a95-dcbdb4d073ab for instance with vm_state shelved and task_state shelving_offloading. [ 1757.513784] env[63379]: DEBUG nova.compute.manager [req-421f9c0c-47b0-4320-943d-aba9b402f81e req-161950ac-be38-4156-b590-61c06ca7baf3 service nova] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Received event network-changed-1c98bd39-d74e-43ba-9a95-dcbdb4d073ab {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1757.513980] env[63379]: DEBUG nova.compute.manager [req-421f9c0c-47b0-4320-943d-aba9b402f81e req-161950ac-be38-4156-b590-61c06ca7baf3 service nova] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Refreshing instance network info cache due to event network-changed-1c98bd39-d74e-43ba-9a95-dcbdb4d073ab. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1757.514228] env[63379]: DEBUG oslo_concurrency.lockutils [req-421f9c0c-47b0-4320-943d-aba9b402f81e req-161950ac-be38-4156-b590-61c06ca7baf3 service nova] Acquiring lock "refresh_cache-7edacb20-8472-4e9d-9408-31947d9f284e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1757.514424] env[63379]: DEBUG oslo_concurrency.lockutils [req-421f9c0c-47b0-4320-943d-aba9b402f81e req-161950ac-be38-4156-b590-61c06ca7baf3 service nova] Acquired lock "refresh_cache-7edacb20-8472-4e9d-9408-31947d9f284e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1757.514725] env[63379]: DEBUG nova.network.neutron [req-421f9c0c-47b0-4320-943d-aba9b402f81e req-161950ac-be38-4156-b590-61c06ca7baf3 service nova] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Refreshing network info cache for port 1c98bd39-d74e-43ba-9a95-dcbdb4d073ab {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1757.552207] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ef353abb-38da-41b8-b52a-f1018a8120bc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.563695] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca3254cc-adac-437b-a6c1-7e17f47835e4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.605574] env[63379]: DEBUG nova.compute.manager [req-8a73ace7-6fac-4143-aa8d-0684362ccd0c req-c0760458-d5c5-49d8-9fb0-aee0eb3ee09c service nova] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Detach interface failed, port_id=a8926575-6550-43c6-b23d-a15787ee76c0, reason: Instance 38be0e8d-188b-4a98-aedc-5d941b63c000 could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 1757.780791] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Releasing lock "refresh_cache-eda684fa-1595-4985-beb7-c298049411bf" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1757.797740] env[63379]: DEBUG oslo_vmware.api [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1779984, 'name': Rename_Task, 'duration_secs': 0.251538} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1757.798583] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1757.798870] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7441c65e-a844-4428-b38e-2f4079a5951e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.808593] env[63379]: DEBUG oslo_vmware.api [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 1757.808593] env[63379]: value = "task-1779986" [ 1757.808593] env[63379]: _type = "Task" [ 1757.808593] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1757.814022] env[63379]: DEBUG nova.scheduler.client.report [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1757.824632] env[63379]: DEBUG oslo_vmware.api [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1779986, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.841468] env[63379]: DEBUG oslo_vmware.api [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Task: {'id': task-1779985, 'name': PowerOffVM_Task, 'duration_secs': 0.231675} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1757.841760] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1757.841964] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1757.842237] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0cc8c687-b804-49b0-9de6-d95a7a9bfc9a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.953296] env[63379]: INFO nova.compute.manager [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Took 30.62 seconds to build instance. [ 1757.968063] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1757.968311] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1757.968503] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Deleting the datastore file [datastore1] 3e875e92-673c-4cfa-86ce-fc270ae03e94 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1757.974069] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-685603fb-ca2b-4ff4-9d2d-bd1bd793a34a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.982886] env[63379]: DEBUG oslo_vmware.api [None req-f8e94cb8-6da7-4d1e-a236-53e59318213c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1779980, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.985235] env[63379]: DEBUG oslo_vmware.api [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Waiting for the task: (returnval){ [ 1757.985235] env[63379]: value = "task-1779988" [ 1757.985235] env[63379]: _type = "Task" [ 1757.985235] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1758.003586] env[63379]: DEBUG oslo_vmware.api [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Task: {'id': task-1779988, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1758.012298] env[63379]: INFO nova.compute.manager [-] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Took 1.68 seconds to deallocate network for instance. [ 1758.318755] env[63379]: DEBUG oslo_concurrency.lockutils [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.439s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1758.328989] env[63379]: DEBUG oslo_vmware.api [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1779986, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1758.352791] env[63379]: DEBUG nova.network.neutron [None req-373921e5-3060-4b02-b08b-a4a21428eb96 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Updating instance_info_cache with network_info: [{"id": "1f122953-4fde-41ae-9895-0ef67cacb236", "address": "fa:16:3e:fc:b1:40", "network": {"id": "21aec006-8ff0-453c-a492-04a7c2a2a4a9", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-29133767-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "26e96d98928449efaf2999f78cd52fac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91c1da19-ab68-4127-bacd-accbaff19651", "external-id": "nsx-vlan-transportzone-319", "segmentation_id": 319, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f122953-4f", "ovs_interfaceid": "1f122953-4fde-41ae-9895-0ef67cacb236", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1758.431176] env[63379]: DEBUG nova.network.neutron [req-421f9c0c-47b0-4320-943d-aba9b402f81e req-161950ac-be38-4156-b590-61c06ca7baf3 service nova] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Updated VIF entry in instance network info cache for port 1c98bd39-d74e-43ba-9a95-dcbdb4d073ab. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1758.431340] env[63379]: DEBUG nova.network.neutron [req-421f9c0c-47b0-4320-943d-aba9b402f81e req-161950ac-be38-4156-b590-61c06ca7baf3 service nova] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Updating instance_info_cache with network_info: [{"id": "1c98bd39-d74e-43ba-9a95-dcbdb4d073ab", "address": "fa:16:3e:8b:77:3f", "network": {"id": "f43cdd88-dc3a-4cc6-af5d-da244f472d78", "bridge": null, "label": "tempest-DeleteServersTestJSON-715557899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "928a9d102f0e45b897eae72fa566c0fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap1c98bd39-d7", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1758.453046] env[63379]: DEBUG oslo_concurrency.lockutils [None req-37904bf9-625a-45c9-8184-4b413d3059f9 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Lock "ebfe6204-c7d5-4e0c-bb63-74d5755552f6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.136s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1758.479646] env[63379]: DEBUG oslo_vmware.api [None req-f8e94cb8-6da7-4d1e-a236-53e59318213c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1779980, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1758.497662] env[63379]: DEBUG oslo_vmware.api [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Task: {'id': task-1779988, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.208255} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1758.497662] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1758.497662] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1758.497662] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1758.497662] env[63379]: INFO nova.compute.manager [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1758.497662] env[63379]: DEBUG oslo.service.loopingcall [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1758.497662] env[63379]: DEBUG nova.compute.manager [-] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1758.497662] env[63379]: DEBUG nova.network.neutron [-] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1758.528137] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d91f1be4-fcf6-4325-913b-63dc162cb408 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1758.528572] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d91f1be4-fcf6-4325-913b-63dc162cb408 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1758.528694] env[63379]: DEBUG nova.objects.instance [None req-d91f1be4-fcf6-4325-913b-63dc162cb408 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Lazy-loading 'resources' on Instance uuid 38be0e8d-188b-4a98-aedc-5d941b63c000 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 
1758.797261] env[63379]: DEBUG nova.compute.manager [req-3f0be401-6c2e-4e06-9816-17650e0c8fb3 req-2e04be80-b0b8-4430-95b0-112ae7de2ded service nova] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Received event network-vif-deleted-3f904953-8743-428b-8084-4936ee47a1df {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1758.797710] env[63379]: INFO nova.compute.manager [req-3f0be401-6c2e-4e06-9816-17650e0c8fb3 req-2e04be80-b0b8-4430-95b0-112ae7de2ded service nova] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Neutron deleted interface 3f904953-8743-428b-8084-4936ee47a1df; detaching it from the instance and deleting it from the info cache [ 1758.797983] env[63379]: DEBUG nova.network.neutron [req-3f0be401-6c2e-4e06-9816-17650e0c8fb3 req-2e04be80-b0b8-4430-95b0-112ae7de2ded service nova] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1758.820591] env[63379]: DEBUG oslo_vmware.api [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1779986, 'name': PowerOnVM_Task, 'duration_secs': 0.757122} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1758.820805] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1758.821017] env[63379]: INFO nova.compute.manager [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Took 8.06 seconds to spawn the instance on the hypervisor. 
[ 1758.822370] env[63379]: DEBUG nova.compute.manager [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1758.822370] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88ba3cfa-c724-4bac-b343-8bb44c570a51 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.841023] env[63379]: DEBUG oslo_concurrency.lockutils [None req-158d3617-1ad5-4b0f-a76f-928aab8ea04d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "7edacb20-8472-4e9d-9408-31947d9f284e" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 25.757s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1758.843252] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c9e21fe1-4256-4b66-9b51-cfaeaf8624d2 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "7edacb20-8472-4e9d-9408-31947d9f284e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 3.091s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1758.844596] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c9e21fe1-4256-4b66-9b51-cfaeaf8624d2 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "7edacb20-8472-4e9d-9408-31947d9f284e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1758.844596] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c9e21fe1-4256-4b66-9b51-cfaeaf8624d2 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "7edacb20-8472-4e9d-9408-31947d9f284e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1758.844596] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c9e21fe1-4256-4b66-9b51-cfaeaf8624d2 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "7edacb20-8472-4e9d-9408-31947d9f284e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1758.849224] env[63379]: INFO nova.compute.manager [None req-c9e21fe1-4256-4b66-9b51-cfaeaf8624d2 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Terminating instance [ 1758.852119] env[63379]: DEBUG nova.compute.manager [None req-c9e21fe1-4256-4b66-9b51-cfaeaf8624d2 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1758.852119] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c9e21fe1-4256-4b66-9b51-cfaeaf8624d2 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1758.852119] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-88be0e1e-605c-45e8-ac56-d05cd62fc1e2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.858924] env[63379]: DEBUG oslo_concurrency.lockutils [None req-373921e5-3060-4b02-b08b-a4a21428eb96 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Releasing lock "refresh_cache-c900bb90-b4a8-40a2-9436-5a0ced1dd919" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1758.864103] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b16f439-b8a5-4634-9dc2-e717ea3491c3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.880289] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-373921e5-3060-4b02-b08b-a4a21428eb96 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1758.881228] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-69cbe263-722f-4d59-9e8d-cc94a35b41d4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.889038] env[63379]: DEBUG oslo_vmware.api [None req-373921e5-3060-4b02-b08b-a4a21428eb96 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Waiting for the task: (returnval){ [ 1758.889038] env[63379]: value = "task-1779989" [ 1758.889038] env[63379]: _type = "Task" [ 1758.889038] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1758.907061] env[63379]: WARNING nova.virt.vmwareapi.vmops [None req-c9e21fe1-4256-4b66-9b51-cfaeaf8624d2 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7edacb20-8472-4e9d-9408-31947d9f284e could not be found. [ 1758.907444] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c9e21fe1-4256-4b66-9b51-cfaeaf8624d2 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1758.907669] env[63379]: INFO nova.compute.manager [None req-c9e21fe1-4256-4b66-9b51-cfaeaf8624d2 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Took 0.06 seconds to destroy the instance on the hypervisor. 
[ 1758.907941] env[63379]: DEBUG oslo.service.loopingcall [None req-c9e21fe1-4256-4b66-9b51-cfaeaf8624d2 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1758.911540] env[63379]: DEBUG nova.compute.manager [-] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1758.911540] env[63379]: DEBUG nova.network.neutron [-] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1758.912928] env[63379]: DEBUG oslo_vmware.api [None req-373921e5-3060-4b02-b08b-a4a21428eb96 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779989, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1758.933995] env[63379]: DEBUG oslo_concurrency.lockutils [req-421f9c0c-47b0-4320-943d-aba9b402f81e req-161950ac-be38-4156-b590-61c06ca7baf3 service nova] Releasing lock "refresh_cache-7edacb20-8472-4e9d-9408-31947d9f284e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1758.981310] env[63379]: DEBUG oslo_vmware.api [None req-f8e94cb8-6da7-4d1e-a236-53e59318213c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1779980, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.127282] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8e74216d-45e0-4bbc-8997-23343bd85145 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Acquiring lock "ebfe6204-c7d5-4e0c-bb63-74d5755552f6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1759.128884] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8e74216d-45e0-4bbc-8997-23343bd85145 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Lock "ebfe6204-c7d5-4e0c-bb63-74d5755552f6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1759.128884] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8e74216d-45e0-4bbc-8997-23343bd85145 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Acquiring lock "ebfe6204-c7d5-4e0c-bb63-74d5755552f6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1759.128884] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8e74216d-45e0-4bbc-8997-23343bd85145 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Lock "ebfe6204-c7d5-4e0c-bb63-74d5755552f6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1759.128884] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8e74216d-45e0-4bbc-8997-23343bd85145 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Lock "ebfe6204-c7d5-4e0c-bb63-74d5755552f6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1759.134672] env[63379]: INFO nova.compute.manager [None req-8e74216d-45e0-4bbc-8997-23343bd85145 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Terminating instance [ 1759.140120] env[63379]: DEBUG nova.compute.manager [None req-8e74216d-45e0-4bbc-8997-23343bd85145 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1759.140341] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8e74216d-45e0-4bbc-8997-23343bd85145 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1759.141211] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e3f7e40-b6a1-4049-afe3-30d8a92270f2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.154606] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e74216d-45e0-4bbc-8997-23343bd85145 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1759.155422] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2366fa72-a5eb-4cb2-9e59-be70416c6a5e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.161857] env[63379]: DEBUG oslo_vmware.api [None req-8e74216d-45e0-4bbc-8997-23343bd85145 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Waiting for the task: (returnval){ [ 1759.161857] env[63379]: value = "task-1779990" [ 1759.161857] env[63379]: _type = "Task" [ 1759.161857] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1759.171804] env[63379]: DEBUG oslo_vmware.api [None req-8e74216d-45e0-4bbc-8997-23343bd85145 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Task: {'id': task-1779990, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.271683] env[63379]: DEBUG nova.network.neutron [-] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1759.303307] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7eb3955e-2622-4607-84aa-cbbe419e4e41 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.306521] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4812f244-d61c-4a9e-8268-f2097252ec14 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.330357] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Updating instance 'eda684fa-1595-4985-beb7-c298049411bf' progress to 0 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1759.348033] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f21b5e2-484a-405d-8d8f-0cd5f3002c5f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.363627] env[63379]: INFO nova.compute.manager [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Took 24.47 seconds to build instance. [ 1759.386111] env[63379]: DEBUG nova.compute.manager [req-3f0be401-6c2e-4e06-9816-17650e0c8fb3 req-2e04be80-b0b8-4430-95b0-112ae7de2ded service nova] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Detach interface failed, port_id=3f904953-8743-428b-8084-4936ee47a1df, reason: Instance 3e875e92-673c-4cfa-86ce-fc270ae03e94 could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 1759.401177] env[63379]: DEBUG oslo_vmware.api [None req-373921e5-3060-4b02-b08b-a4a21428eb96 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779989, 'name': PowerOnVM_Task} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1759.402446] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-373921e5-3060-4b02-b08b-a4a21428eb96 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1759.402656] env[63379]: DEBUG nova.compute.manager [None req-373921e5-3060-4b02-b08b-a4a21428eb96 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1759.403762] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8823c711-3d81-490a-adbd-a8bbabf9d3b0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.407170] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40430efa-38f8-497a-ba29-91ff27c27b13 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.416620] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-694668d6-056b-42ad-8d02-f43849de2034 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.452360] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4902daa-d614-4ef6-b8b9-1911968b0d45 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.461693] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b06bc1d3-af31-46ee-96bd-9d7a9d4085e5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.478095] env[63379]: DEBUG nova.compute.provider_tree [None req-d91f1be4-fcf6-4325-913b-63dc162cb408 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1759.488525] env[63379]: DEBUG oslo_vmware.api [None req-f8e94cb8-6da7-4d1e-a236-53e59318213c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1779980, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.650374] env[63379]: DEBUG nova.network.neutron [-] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1759.675150] env[63379]: DEBUG oslo_vmware.api [None req-8e74216d-45e0-4bbc-8997-23343bd85145 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Task: {'id': task-1779990, 'name': PowerOffVM_Task, 'duration_secs': 0.304405} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1759.675452] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e74216d-45e0-4bbc-8997-23343bd85145 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1759.675634] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8e74216d-45e0-4bbc-8997-23343bd85145 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1759.676094] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b3569d5e-eb6a-4432-9248-644450d2e9fa {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.774979] env[63379]: INFO nova.compute.manager [-] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Took 1.28 seconds to deallocate network for instance. [ 1759.847021] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1759.847021] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2e98252d-5c5d-403c-af32-da02faf98051 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.855388] env[63379]: DEBUG oslo_vmware.api [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1759.855388] env[63379]: value = "task-1779992" [ 1759.855388] env[63379]: _type = "Task" [ 1759.855388] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1759.865922] env[63379]: DEBUG oslo_vmware.api [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779992, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.868796] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5aa80a3a-13d7-4de1-99da-0df677d0fe6d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "4b419aa8-d4da-45fd-a6da-6f05ee851f2f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.988s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1759.957093] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8e74216d-45e0-4bbc-8997-23343bd85145 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1759.957390] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8e74216d-45e0-4bbc-8997-23343bd85145 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1759.957642] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e74216d-45e0-4bbc-8997-23343bd85145 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Deleting the datastore file [datastore1] ebfe6204-c7d5-4e0c-bb63-74d5755552f6 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1759.957974] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fda3be40-303e-4f38-afbc-f784ad650440 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.965992] env[63379]: DEBUG oslo_vmware.api [None req-8e74216d-45e0-4bbc-8997-23343bd85145 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Waiting for the task: (returnval){ [ 1759.965992] env[63379]: value = "task-1779993" [ 1759.965992] env[63379]: _type = "Task" [ 1759.965992] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1759.974541] env[63379]: DEBUG oslo_vmware.api [None req-8e74216d-45e0-4bbc-8997-23343bd85145 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Task: {'id': task-1779993, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.991127] env[63379]: DEBUG nova.scheduler.client.report [None req-d91f1be4-fcf6-4325-913b-63dc162cb408 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1760.003054] env[63379]: DEBUG oslo_vmware.api [None req-f8e94cb8-6da7-4d1e-a236-53e59318213c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1779980, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.154862] env[63379]: INFO nova.compute.manager [-] [instance: 7edacb20-8472-4e9d-9408-31947d9f284e] Took 1.24 seconds to deallocate network for instance. [ 1760.287923] env[63379]: DEBUG oslo_concurrency.lockutils [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1760.367176] env[63379]: DEBUG oslo_vmware.api [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779992, 'name': PowerOffVM_Task, 'duration_secs': 0.228814} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1760.367176] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1760.367176] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Updating instance 'eda684fa-1595-4985-beb7-c298049411bf' progress to 17 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1760.473254] env[63379]: DEBUG nova.compute.manager [req-09c08523-25f9-47a8-b54f-561c4287eae7 req-759d1e12-5f7c-4807-a33e-5850f930ecde service nova] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Received event network-changed-d4e8381c-6eb1-4ebe-a6a3-b89ee2eb423e {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1760.473454] env[63379]: DEBUG nova.compute.manager [req-09c08523-25f9-47a8-b54f-561c4287eae7 req-759d1e12-5f7c-4807-a33e-5850f930ecde service nova] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Refreshing instance network info cache due to event network-changed-d4e8381c-6eb1-4ebe-a6a3-b89ee2eb423e. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1760.473669] env[63379]: DEBUG oslo_concurrency.lockutils [req-09c08523-25f9-47a8-b54f-561c4287eae7 req-759d1e12-5f7c-4807-a33e-5850f930ecde service nova] Acquiring lock "refresh_cache-4b419aa8-d4da-45fd-a6da-6f05ee851f2f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1760.473820] env[63379]: DEBUG oslo_concurrency.lockutils [req-09c08523-25f9-47a8-b54f-561c4287eae7 req-759d1e12-5f7c-4807-a33e-5850f930ecde service nova] Acquired lock "refresh_cache-4b419aa8-d4da-45fd-a6da-6f05ee851f2f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1760.473984] env[63379]: DEBUG nova.network.neutron [req-09c08523-25f9-47a8-b54f-561c4287eae7 req-759d1e12-5f7c-4807-a33e-5850f930ecde service nova] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Refreshing network info cache for port d4e8381c-6eb1-4ebe-a6a3-b89ee2eb423e {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1760.481395] env[63379]: DEBUG oslo_vmware.api [None req-8e74216d-45e0-4bbc-8997-23343bd85145 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Task: {'id': task-1779993, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.31176} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1760.486778] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e74216d-45e0-4bbc-8997-23343bd85145 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1760.487368] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8e74216d-45e0-4bbc-8997-23343bd85145 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1760.487368] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8e74216d-45e0-4bbc-8997-23343bd85145 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1760.487368] env[63379]: INFO nova.compute.manager [None req-8e74216d-45e0-4bbc-8997-23343bd85145 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Took 1.35 seconds to destroy the instance on the hypervisor. [ 1760.487534] env[63379]: DEBUG oslo.service.loopingcall [None req-8e74216d-45e0-4bbc-8997-23343bd85145 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1760.488645] env[63379]: DEBUG nova.compute.manager [-] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1760.488751] env[63379]: DEBUG nova.network.neutron [-] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1760.496233] env[63379]: DEBUG oslo_vmware.api [None req-f8e94cb8-6da7-4d1e-a236-53e59318213c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1779980, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.496740] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d91f1be4-fcf6-4325-913b-63dc162cb408 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.968s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1760.500214] env[63379]: DEBUG oslo_concurrency.lockutils [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.212s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1760.500214] env[63379]: DEBUG nova.objects.instance [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Lazy-loading 'resources' on Instance uuid 3e875e92-673c-4cfa-86ce-fc270ae03e94 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1760.526238] env[63379]: INFO nova.scheduler.client.report [None req-d91f1be4-fcf6-4325-913b-63dc162cb408 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Deleted allocations for instance 38be0e8d-188b-4a98-aedc-5d941b63c000 [ 1760.878530] env[63379]: DEBUG nova.virt.hardware [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1760.879014] env[63379]: DEBUG nova.virt.hardware [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1760.879581] env[63379]: DEBUG nova.virt.hardware [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1760.879870] env[63379]: DEBUG nova.virt.hardware [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1760.880102] env[63379]: DEBUG nova.virt.hardware [None 
req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1760.880552] env[63379]: DEBUG nova.virt.hardware [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1760.880986] env[63379]: DEBUG nova.virt.hardware [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1760.881375] env[63379]: DEBUG nova.virt.hardware [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1760.881560] env[63379]: DEBUG nova.virt.hardware [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1760.881874] env[63379]: DEBUG nova.virt.hardware [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1760.883122] env[63379]: DEBUG nova.virt.hardware [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1760.889924] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f77a0002-1ca6-4d89-9288-0f5b4374e499 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.914842] env[63379]: DEBUG oslo_vmware.api [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1760.914842] env[63379]: value = "task-1779994" [ 1760.914842] env[63379]: _type = "Task" [ 1760.914842] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1760.924972] env[63379]: DEBUG oslo_vmware.api [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779994, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.992448] env[63379]: DEBUG oslo_vmware.api [None req-f8e94cb8-6da7-4d1e-a236-53e59318213c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1779980, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.040731] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d91f1be4-fcf6-4325-913b-63dc162cb408 tempest-ServersWithSpecificFlavorTestJSON-765004270 tempest-ServersWithSpecificFlavorTestJSON-765004270-project-member] Lock "38be0e8d-188b-4a98-aedc-5d941b63c000" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.557s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1761.188307] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c9e21fe1-4256-4b66-9b51-cfaeaf8624d2 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "7edacb20-8472-4e9d-9408-31947d9f284e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.346s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1761.295265] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66d6b762-8eea-45d7-91aa-35ed3a538fe1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.302677] env[63379]: DEBUG nova.network.neutron [req-09c08523-25f9-47a8-b54f-561c4287eae7 req-759d1e12-5f7c-4807-a33e-5850f930ecde service nova] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Updated VIF entry in instance network info cache for port d4e8381c-6eb1-4ebe-a6a3-b89ee2eb423e. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1761.302677] env[63379]: DEBUG nova.network.neutron [req-09c08523-25f9-47a8-b54f-561c4287eae7 req-759d1e12-5f7c-4807-a33e-5850f930ecde service nova] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Updating instance_info_cache with network_info: [{"id": "d4e8381c-6eb1-4ebe-a6a3-b89ee2eb423e", "address": "fa:16:3e:f4:8d:6f", "network": {"id": "0dd98be0-5b25-4e45-ac38-4b8d3cd9fc6c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-191573180-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "645f0e0a5e1a44d59ca9c85da49bb454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd4e8381c-6e", "ovs_interfaceid": "d4e8381c-6eb1-4ebe-a6a3-b89ee2eb423e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1761.308427] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-125e3065-a8c9-4bda-9d9a-f7a231828686 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.342761] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4f5bf5d-5431-4460-a0d9-c3daabcb90ec {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.351929] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c16280f7-3c5a-4365-b2c9-419eaaec393e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.367319] env[63379]: DEBUG nova.compute.provider_tree [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1761.429547] env[63379]: DEBUG oslo_vmware.api [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779994, 'name': ReconfigVM_Task, 'duration_secs': 0.24047} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1761.430286] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Updating instance 'eda684fa-1595-4985-beb7-c298049411bf' progress to 33 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1761.493554] env[63379]: DEBUG oslo_vmware.api [None req-f8e94cb8-6da7-4d1e-a236-53e59318213c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1779980, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.496019] env[63379]: DEBUG nova.network.neutron [-] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1761.805090] env[63379]: DEBUG oslo_concurrency.lockutils [req-09c08523-25f9-47a8-b54f-561c4287eae7 req-759d1e12-5f7c-4807-a33e-5850f930ecde service nova] Releasing lock "refresh_cache-4b419aa8-d4da-45fd-a6da-6f05ee851f2f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1761.889472] env[63379]: ERROR nova.scheduler.client.report [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] [req-fc174174-c85e-455a-a290-71b7078c956b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID cf478c89-515f-4372-b90f-4868ab56e978. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-fc174174-c85e-455a-a290-71b7078c956b"}]} [ 1761.905924] env[63379]: DEBUG nova.scheduler.client.report [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Refreshing inventories for resource provider cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1761.925631] env[63379]: DEBUG nova.scheduler.client.report [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Updating ProviderTree inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1761.925849] env[63379]: DEBUG nova.compute.provider_tree [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1761.939687] env[63379]: DEBUG nova.virt.hardware [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1761.939934] env[63379]: DEBUG nova.virt.hardware [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1761.940574] env[63379]: DEBUG nova.virt.hardware [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 
tempest-ServerDiskConfigTestJSON-1340181381-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1761.940574] env[63379]: DEBUG nova.virt.hardware [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1761.940702] env[63379]: DEBUG nova.virt.hardware [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1761.941421] env[63379]: DEBUG nova.virt.hardware [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1761.941713] env[63379]: DEBUG nova.virt.hardware [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1761.941876] env[63379]: DEBUG nova.virt.hardware [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1761.942088] env[63379]: DEBUG nova.virt.hardware [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1761.942270] env[63379]: DEBUG nova.virt.hardware [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1761.942508] env[63379]: DEBUG nova.virt.hardware [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1761.948045] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Reconfiguring VM instance instance-00000055 to detach disk 2000 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1761.949186] env[63379]: DEBUG nova.scheduler.client.report [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 
tempest-ServerTagsTestJSON-785899892-project-member] Refreshing aggregate associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, aggregates: None {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1761.951785] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d653e6c0-c0db-42d7-af5f-88e18bf857ac {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.972547] env[63379]: DEBUG oslo_vmware.api [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1761.972547] env[63379]: value = "task-1779995" [ 1761.972547] env[63379]: _type = "Task" [ 1761.972547] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1761.983046] env[63379]: DEBUG nova.scheduler.client.report [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Refreshing trait associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1761.992588] env[63379]: DEBUG oslo_vmware.api [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779995, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.998973] env[63379]: INFO nova.compute.manager [-] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Took 1.51 seconds to deallocate network for instance. [ 1761.999336] env[63379]: DEBUG oslo_vmware.api [None req-f8e94cb8-6da7-4d1e-a236-53e59318213c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1779980, 'name': ReconfigVM_Task, 'duration_secs': 5.881436} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1762.005360] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f8e94cb8-6da7-4d1e-a236-53e59318213c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Releasing lock "f983d089-7cfc-46a5-8f8d-f49f67aef1da" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1762.005665] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f8e94cb8-6da7-4d1e-a236-53e59318213c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Reconfigured VM to detach interface {{(pid=63379) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1762.186050] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "0f248290-a14c-4c76-98b3-4efa5bda5f05" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1762.186290] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "0f248290-a14c-4c76-98b3-4efa5bda5f05" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1762.306053] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb6c45d2-cae3-4244-8ded-36c9f3c5ae32 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.311540] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e971afa1-1966-4167-ab3d-eb2ad90e8a6a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.348815] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d78d3e94-cc05-424c-bf14-d77e55d61248 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.357575] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-352aab99-f276-4d11-834d-90373dac641c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.376142] env[63379]: DEBUG nova.compute.provider_tree [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1762.426029] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cadf2f1c-38b3-40af-93bb-2fab0eafa3f3 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Acquiring lock "8877e0f7-091b-4a91-bb5c-fb7733e5f70c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1762.426235] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cadf2f1c-38b3-40af-93bb-2fab0eafa3f3 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Lock "8877e0f7-091b-4a91-bb5c-fb7733e5f70c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1762.426572] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cadf2f1c-38b3-40af-93bb-2fab0eafa3f3 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Acquiring lock "8877e0f7-091b-4a91-bb5c-fb7733e5f70c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1762.426973] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cadf2f1c-38b3-40af-93bb-2fab0eafa3f3 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Lock "8877e0f7-091b-4a91-bb5c-fb7733e5f70c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1762.426973] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cadf2f1c-38b3-40af-93bb-2fab0eafa3f3 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Lock "8877e0f7-091b-4a91-bb5c-fb7733e5f70c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1762.431017] env[63379]: INFO nova.compute.manager [None req-cadf2f1c-38b3-40af-93bb-2fab0eafa3f3 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Terminating instance [ 1762.431422] env[63379]: DEBUG nova.compute.manager [None req-cadf2f1c-38b3-40af-93bb-2fab0eafa3f3 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1762.431624] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cadf2f1c-38b3-40af-93bb-2fab0eafa3f3 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1762.432485] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-227626d7-d1c9-4889-a113-6a3a4d0eb98e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.440814] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-cadf2f1c-38b3-40af-93bb-2fab0eafa3f3 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1762.441103] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8336b8c6-2f28-4047-b93f-89eb8a266e05 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.449239] env[63379]: DEBUG oslo_vmware.api [None req-cadf2f1c-38b3-40af-93bb-2fab0eafa3f3 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Waiting for the task: (returnval){ [ 1762.449239] env[63379]: value = "task-1779996" [ 1762.449239] env[63379]: _type = "Task" [ 1762.449239] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1762.458237] env[63379]: DEBUG oslo_vmware.api [None req-cadf2f1c-38b3-40af-93bb-2fab0eafa3f3 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779996, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.484549] env[63379]: DEBUG oslo_vmware.api [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779995, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.510020] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8e74216d-45e0-4bbc-8997-23343bd85145 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1762.689288] env[63379]: DEBUG nova.compute.manager [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Starting instance... 
{{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1762.899007] env[63379]: ERROR nova.scheduler.client.report [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] [req-114fbd07-203c-4e9d-b7d6-60e81b5d96ba] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID cf478c89-515f-4372-b90f-4868ab56e978. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-114fbd07-203c-4e9d-b7d6-60e81b5d96ba"}]} [ 1762.918716] env[63379]: DEBUG nova.scheduler.client.report [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Refreshing inventories for resource provider cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1762.936659] env[63379]: DEBUG nova.scheduler.client.report [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Updating ProviderTree inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1762.936659] env[63379]: DEBUG nova.compute.provider_tree [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1762.942031] env[63379]: DEBUG nova.compute.manager [req-bce5e8e4-8671-4179-b321-51d2043cbbed req-55cf4c39-bdbc-4c4e-b16f-3be59e0ae548 service nova] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Received event network-vif-deleted-1ec6781c-1db3-427d-be1c-37534196f2f4 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1762.942031] env[63379]: DEBUG nova.compute.manager [req-bce5e8e4-8671-4179-b321-51d2043cbbed req-55cf4c39-bdbc-4c4e-b16f-3be59e0ae548 service nova] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Received event 
network-vif-deleted-d06b25f4-72d4-4f91-82df-8ff330c8ddc2 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1762.942902] env[63379]: INFO nova.compute.manager [req-bce5e8e4-8671-4179-b321-51d2043cbbed req-55cf4c39-bdbc-4c4e-b16f-3be59e0ae548 service nova] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Neutron deleted interface d06b25f4-72d4-4f91-82df-8ff330c8ddc2; detaching it from the instance and deleting it from the info cache [ 1762.943257] env[63379]: DEBUG nova.network.neutron [req-bce5e8e4-8671-4179-b321-51d2043cbbed req-55cf4c39-bdbc-4c4e-b16f-3be59e0ae548 service nova] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Updating instance_info_cache with network_info: [{"id": "3538ffcb-51cd-414b-ad0e-080a6e1ff138", "address": "fa:16:3e:6a:5e:54", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3538ffcb-51", "ovs_interfaceid": "3538ffcb-51cd-414b-ad0e-080a6e1ff138", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1762.956060] env[63379]: DEBUG nova.scheduler.client.report [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Refreshing aggregate associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, aggregates: None {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1762.964692] env[63379]: DEBUG oslo_vmware.api [None req-cadf2f1c-38b3-40af-93bb-2fab0eafa3f3 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779996, 'name': PowerOffVM_Task, 'duration_secs': 0.31922} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1762.965620] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-cadf2f1c-38b3-40af-93bb-2fab0eafa3f3 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1762.965808] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cadf2f1c-38b3-40af-93bb-2fab0eafa3f3 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1762.966080] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ee5e3f94-de8d-40b5-bd25-af50551fa43f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.973761] env[63379]: DEBUG nova.scheduler.client.report [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Refreshing trait associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1762.986088] env[63379]: DEBUG oslo_vmware.api [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779995, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.136813] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cadf2f1c-38b3-40af-93bb-2fab0eafa3f3 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1763.137096] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cadf2f1c-38b3-40af-93bb-2fab0eafa3f3 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1763.137296] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-cadf2f1c-38b3-40af-93bb-2fab0eafa3f3 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Deleting the datastore file [datastore1] 8877e0f7-091b-4a91-bb5c-fb7733e5f70c {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1763.140058] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f1827c08-bc73-469a-a4d8-3f87e6e64392 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.146931] env[63379]: DEBUG oslo_vmware.api [None req-cadf2f1c-38b3-40af-93bb-2fab0eafa3f3 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Waiting for the task: (returnval){ [ 1763.146931] env[63379]: value = "task-1779998" [ 1763.146931] env[63379]: _type = "Task" [ 1763.146931] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1763.158511] env[63379]: DEBUG oslo_vmware.api [None req-cadf2f1c-38b3-40af-93bb-2fab0eafa3f3 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779998, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.220751] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1763.297526] env[63379]: DEBUG oslo_concurrency.lockutils [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Acquiring lock "2254844f-b1f9-435e-ac8a-f114f05331e1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1763.297859] env[63379]: DEBUG oslo_concurrency.lockutils [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Lock "2254844f-b1f9-435e-ac8a-f114f05331e1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1763.300483] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-676bd0b8-3760-4cf3-bfe4-f6b0d82e0ee1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.310039] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc8def38-5e76-4dda-9015-ae1abc137a6c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.344344] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d4ceacd-705b-4ec4-b6fc-0d5848dde927 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.352385] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcb349d5-2f28-456e-926e-d8571e92916d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.357846] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f8e94cb8-6da7-4d1e-a236-53e59318213c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "refresh_cache-f983d089-7cfc-46a5-8f8d-f49f67aef1da" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1763.357846] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f8e94cb8-6da7-4d1e-a236-53e59318213c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquired lock "refresh_cache-f983d089-7cfc-46a5-8f8d-f49f67aef1da" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1763.357846] env[63379]: DEBUG nova.network.neutron [None req-f8e94cb8-6da7-4d1e-a236-53e59318213c tempest-AttachInterfacesTestJSON-924600923 
tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1763.370237] env[63379]: DEBUG nova.compute.provider_tree [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1763.447186] env[63379]: DEBUG oslo_concurrency.lockutils [req-bce5e8e4-8671-4179-b321-51d2043cbbed req-55cf4c39-bdbc-4c4e-b16f-3be59e0ae548 service nova] Acquiring lock "f983d089-7cfc-46a5-8f8d-f49f67aef1da" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1763.447368] env[63379]: DEBUG oslo_concurrency.lockutils [req-bce5e8e4-8671-4179-b321-51d2043cbbed req-55cf4c39-bdbc-4c4e-b16f-3be59e0ae548 service nova] Acquired lock "f983d089-7cfc-46a5-8f8d-f49f67aef1da" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1763.448301] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4c3b149-d92c-4d80-81d2-9768904dde77 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.466088] env[63379]: DEBUG oslo_concurrency.lockutils [req-bce5e8e4-8671-4179-b321-51d2043cbbed req-55cf4c39-bdbc-4c4e-b16f-3be59e0ae548 service nova] Releasing lock "f983d089-7cfc-46a5-8f8d-f49f67aef1da" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1763.466384] env[63379]: WARNING nova.compute.manager [req-bce5e8e4-8671-4179-b321-51d2043cbbed req-55cf4c39-bdbc-4c4e-b16f-3be59e0ae548 service nova] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Detach interface failed, port_id=d06b25f4-72d4-4f91-82df-8ff330c8ddc2, reason: No device with interface-id d06b25f4-72d4-4f91-82df-8ff330c8ddc2 exists on VM: nova.exception.NotFound: No device with interface-id d06b25f4-72d4-4f91-82df-8ff330c8ddc2 exists on VM [ 1763.485983] env[63379]: DEBUG oslo_vmware.api [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779995, 'name': ReconfigVM_Task, 'duration_secs': 1.156603} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1763.485983] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Reconfigured VM instance instance-00000055 to detach disk 2000 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1763.486825] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41e72e00-4e49-4739-8967-4a6684f60866 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.513446] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Reconfiguring VM instance instance-00000055 to attach disk [datastore1] eda684fa-1595-4985-beb7-c298049411bf/eda684fa-1595-4985-beb7-c298049411bf.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1763.513826] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7bea3020-289d-4e84-aa24-179f7763f75f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.528073] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9772ea9e-f7c8-472e-b392-24b4ef41f2cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "f983d089-7cfc-46a5-8f8d-f49f67aef1da" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1763.528395] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9772ea9e-f7c8-472e-b392-24b4ef41f2cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "f983d089-7cfc-46a5-8f8d-f49f67aef1da" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1763.528718] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9772ea9e-f7c8-472e-b392-24b4ef41f2cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "f983d089-7cfc-46a5-8f8d-f49f67aef1da-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1763.529009] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9772ea9e-f7c8-472e-b392-24b4ef41f2cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "f983d089-7cfc-46a5-8f8d-f49f67aef1da-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1763.529912] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9772ea9e-f7c8-472e-b392-24b4ef41f2cc 
tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "f983d089-7cfc-46a5-8f8d-f49f67aef1da-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1763.532181] env[63379]: INFO nova.compute.manager [None req-9772ea9e-f7c8-472e-b392-24b4ef41f2cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Terminating instance [ 1763.534665] env[63379]: DEBUG nova.compute.manager [None req-9772ea9e-f7c8-472e-b392-24b4ef41f2cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1763.534903] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9772ea9e-f7c8-472e-b392-24b4ef41f2cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1763.535716] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36580c26-5c05-4403-b264-9513da709176 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.541991] env[63379]: DEBUG oslo_vmware.api [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1763.541991] env[63379]: value = "task-1779999" [ 1763.541991] env[63379]: _type = "Task" [ 1763.541991] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1763.546705] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-9772ea9e-f7c8-472e-b392-24b4ef41f2cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1763.550032] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-870e8d95-543e-48d7-a00e-06989c7274fd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.552267] env[63379]: DEBUG oslo_vmware.api [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779999, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.554173] env[63379]: DEBUG oslo_vmware.api [None req-9772ea9e-f7c8-472e-b392-24b4ef41f2cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1763.554173] env[63379]: value = "task-1780000" [ 1763.554173] env[63379]: _type = "Task" [ 1763.554173] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1763.564080] env[63379]: DEBUG oslo_vmware.api [None req-9772ea9e-f7c8-472e-b392-24b4ef41f2cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780000, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.658186] env[63379]: DEBUG oslo_vmware.api [None req-cadf2f1c-38b3-40af-93bb-2fab0eafa3f3 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1779998, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.446155} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1763.658493] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-cadf2f1c-38b3-40af-93bb-2fab0eafa3f3 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1763.658693] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cadf2f1c-38b3-40af-93bb-2fab0eafa3f3 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1763.658923] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cadf2f1c-38b3-40af-93bb-2fab0eafa3f3 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1763.659147] env[63379]: INFO nova.compute.manager [None req-cadf2f1c-38b3-40af-93bb-2fab0eafa3f3 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Took 1.23 seconds to destroy the instance on the hypervisor. [ 1763.659428] env[63379]: DEBUG oslo.service.loopingcall [None req-cadf2f1c-38b3-40af-93bb-2fab0eafa3f3 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1763.659978] env[63379]: DEBUG nova.compute.manager [-] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1763.659978] env[63379]: DEBUG nova.network.neutron [-] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1763.806497] env[63379]: DEBUG nova.compute.manager [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Starting instance... 
{{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1763.911154] env[63379]: DEBUG nova.scheduler.client.report [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Updated inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 with generation 113 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1763.911502] env[63379]: DEBUG nova.compute.provider_tree [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Updating resource provider cf478c89-515f-4372-b90f-4868ab56e978 generation from 113 to 114 during operation: update_inventory {{(pid=63379) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1763.911649] env[63379]: DEBUG nova.compute.provider_tree [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1764.055826] env[63379]: DEBUG oslo_vmware.api [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1779999, 'name': ReconfigVM_Task, 'duration_secs': 0.292891} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1764.059731] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Reconfigured VM instance instance-00000055 to attach disk [datastore1] eda684fa-1595-4985-beb7-c298049411bf/eda684fa-1595-4985-beb7-c298049411bf.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1764.060230] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Updating instance 'eda684fa-1595-4985-beb7-c298049411bf' progress to 50 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1764.068958] env[63379]: DEBUG oslo_vmware.api [None req-9772ea9e-f7c8-472e-b392-24b4ef41f2cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780000, 'name': PowerOffVM_Task, 'duration_secs': 0.22368} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1764.069418] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-9772ea9e-f7c8-472e-b392-24b4ef41f2cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1764.069589] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9772ea9e-f7c8-472e-b392-24b4ef41f2cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1764.069846] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-13543dd2-cde8-4098-9962-7f10b66d0a01 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.229205] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9772ea9e-f7c8-472e-b392-24b4ef41f2cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1764.230344] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9772ea9e-f7c8-472e-b392-24b4ef41f2cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1764.232017] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-9772ea9e-f7c8-472e-b392-24b4ef41f2cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Deleting the datastore file [datastore1] f983d089-7cfc-46a5-8f8d-f49f67aef1da {{(pid=63379) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1764.232017] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a401315f-f972-459c-8ca2-cb949cb00bf3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.240221] env[63379]: DEBUG oslo_vmware.api [None req-9772ea9e-f7c8-472e-b392-24b4ef41f2cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1764.240221] env[63379]: value = "task-1780002" [ 1764.240221] env[63379]: _type = "Task" [ 1764.240221] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1764.250174] env[63379]: DEBUG oslo_vmware.api [None req-9772ea9e-f7c8-472e-b392-24b4ef41f2cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780002, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.326893] env[63379]: DEBUG oslo_concurrency.lockutils [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1764.399357] env[63379]: DEBUG nova.network.neutron [None req-f8e94cb8-6da7-4d1e-a236-53e59318213c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Updating instance_info_cache with network_info: [{"id": "3538ffcb-51cd-414b-ad0e-080a6e1ff138", "address": "fa:16:3e:6a:5e:54", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3538ffcb-51", "ovs_interfaceid": "3538ffcb-51cd-414b-ad0e-080a6e1ff138", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1764.417259] env[63379]: DEBUG oslo_concurrency.lockutils [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.918s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1764.419816] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8e74216d-45e0-4bbc-8997-23343bd85145 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.910s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1764.420110] env[63379]: DEBUG nova.objects.instance [None req-8e74216d-45e0-4bbc-8997-23343bd85145 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Lazy-loading 'resources' on Instance uuid ebfe6204-c7d5-4e0c-bb63-74d5755552f6 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1764.442540] env[63379]: INFO nova.scheduler.client.report [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Deleted allocations for instance 3e875e92-673c-4cfa-86ce-fc270ae03e94 [ 1764.568210] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ae82c88-23da-4898-9c48-2932b2979259 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.592557] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-686d4f6c-96b7-484e-bb88-b4805567e1fd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.612410] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Updating instance 'eda684fa-1595-4985-beb7-c298049411bf' progress to 67 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1764.639390] env[63379]: DEBUG nova.network.neutron [-] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1764.723205] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "9040201c-e1de-47d9-b9c2-b30c14e32749" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1764.723457] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "9040201c-e1de-47d9-b9c2-b30c14e32749" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1764.750540] env[63379]: DEBUG oslo_vmware.api [None req-9772ea9e-f7c8-472e-b392-24b4ef41f2cc 
tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780002, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.902560] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f8e94cb8-6da7-4d1e-a236-53e59318213c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Releasing lock "refresh_cache-f983d089-7cfc-46a5-8f8d-f49f67aef1da" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1764.949602] env[63379]: DEBUG oslo_concurrency.lockutils [None req-75dff1ed-ed46-4c8e-badd-c1a1a615a9ce tempest-ServerTagsTestJSON-785899892 tempest-ServerTagsTestJSON-785899892-project-member] Lock "3e875e92-673c-4cfa-86ce-fc270ae03e94" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.646s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1765.105280] env[63379]: DEBUG nova.compute.manager [req-5fd9c60f-4355-4c19-b6b6-dfc6798d5cc6 req-d6c09fdc-5c88-4104-a8bc-a047b84a7a3a service nova] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Received event network-vif-deleted-cff629c0-a0d5-447f-8157-45a44ad90882 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1765.142273] env[63379]: INFO nova.compute.manager [-] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Took 1.48 seconds to deallocate network for instance. [ 1765.193818] env[63379]: DEBUG nova.network.neutron [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Port 82d4cc07-9772-4f7e-87ba-1ef653e88fd3 binding to destination host cpu-1 is already ACTIVE {{(pid=63379) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1765.213930] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e419c450-4cad-4743-92dc-5e09c8c34865 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.222792] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-794e1d8f-26c7-46b0-bccb-c8d48ed80f98 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.226291] env[63379]: DEBUG nova.compute.manager [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Starting instance... 
{{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1765.260957] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80655a41-216c-483c-bfb9-7cd6663cb0bb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.272432] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80fa67d2-63e3-46af-9078-df5d9a9b7bdd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.276127] env[63379]: DEBUG oslo_vmware.api [None req-9772ea9e-f7c8-472e-b392-24b4ef41f2cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780002, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.533323} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1765.276865] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-9772ea9e-f7c8-472e-b392-24b4ef41f2cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1765.277085] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9772ea9e-f7c8-472e-b392-24b4ef41f2cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1765.277278] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9772ea9e-f7c8-472e-b392-24b4ef41f2cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1765.277456] env[63379]: INFO nova.compute.manager [None req-9772ea9e-f7c8-472e-b392-24b4ef41f2cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Took 1.74 seconds to destroy the instance on the hypervisor. [ 1765.277727] env[63379]: DEBUG oslo.service.loopingcall [None req-9772ea9e-f7c8-472e-b392-24b4ef41f2cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1765.278315] env[63379]: DEBUG nova.compute.manager [-] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1765.278423] env[63379]: DEBUG nova.network.neutron [-] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1765.288463] env[63379]: DEBUG nova.compute.provider_tree [None req-8e74216d-45e0-4bbc-8997-23343bd85145 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1765.406198] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f8e94cb8-6da7-4d1e-a236-53e59318213c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "interface-f983d089-7cfc-46a5-8f8d-f49f67aef1da-d06b25f4-72d4-4f91-82df-8ff330c8ddc2" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.040s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1765.652051] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cadf2f1c-38b3-40af-93bb-2fab0eafa3f3 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1765.763920] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1765.795185] env[63379]: DEBUG nova.scheduler.client.report [None req-8e74216d-45e0-4bbc-8997-23343bd85145 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1766.221198] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "eda684fa-1595-4985-beb7-c298049411bf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1766.221455] env[63379]: DEBUG 
oslo_concurrency.lockutils [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "eda684fa-1595-4985-beb7-c298049411bf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1766.221637] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "eda684fa-1595-4985-beb7-c298049411bf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1766.301628] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8e74216d-45e0-4bbc-8997-23343bd85145 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.882s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1766.304857] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.083s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1766.306720] env[63379]: INFO nova.compute.claims [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1766.336313] env[63379]: INFO nova.scheduler.client.report [None req-8e74216d-45e0-4bbc-8997-23343bd85145 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Deleted allocations for instance ebfe6204-c7d5-4e0c-bb63-74d5755552f6 [ 1766.577033] env[63379]: DEBUG nova.network.neutron [-] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1766.851581] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8e74216d-45e0-4bbc-8997-23343bd85145 tempest-ServerMetadataNegativeTestJSON-899678029 tempest-ServerMetadataNegativeTestJSON-899678029-project-member] Lock "ebfe6204-c7d5-4e0c-bb63-74d5755552f6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.723s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1767.082492] env[63379]: INFO nova.compute.manager [-] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Took 1.80 seconds to deallocate network for instance. 
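Editor's note, not part of the captured log: the repeated "Acquiring lock ... / Lock ... acquired ... waited N s / Lock ... released ... held N s" triplets throughout this trace are emitted by oslo.concurrency's lockutils helpers (the lockutils.py inner and lock call sites cited in each record), not by Nova code directly. The sketch below is a minimal, illustrative reconstruction of the two usage patterns that produce those lines; the lock names and function are placeholders chosen to echo this trace, not code taken from Nova.

    # Illustrative sketch only, assuming standard oslo.concurrency usage.
    from oslo_concurrency import lockutils

    # Decorator form: the wrapper ("inner" in lockutils.py) logs
    #   Acquiring lock "<name>" by "<qualified function>"
    #   Lock "<name>" acquired by "<...>" :: waited X.XXXs
    #   Lock "<name>" "released" by "<...>" :: held X.XXXs
    # which matches the compute_resources / instance-UUID records above.
    @lockutils.synchronized('compute_resources')
    def claim_resources_sketch(instance_uuid):
        # Runs only while the named in-process lock is held.
        return 'claimed %s' % instance_uuid

    # Context-manager form: lockutils.lock() logs the plain
    # Acquiring / Acquired / Releasing lines (lockutils.py lock),
    # as seen around the "refresh_cache-<uuid>" records above.
    with lockutils.lock('refresh_cache-<instance-uuid>'):
        pass  # e.g. rebuild the instance network info cache under the lock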
[ 1767.136727] env[63379]: DEBUG nova.compute.manager [req-36c8ff75-27c4-4207-8715-f2bd73e4a62f req-b15bb6d0-aef8-4e33-86d3-c94101a870dd service nova] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Received event network-vif-deleted-3538ffcb-51cd-414b-ad0e-080a6e1ff138 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1767.259046] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "refresh_cache-eda684fa-1595-4985-beb7-c298049411bf" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1767.259451] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquired lock "refresh_cache-eda684fa-1595-4985-beb7-c298049411bf" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1767.259451] env[63379]: DEBUG nova.network.neutron [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1767.595484] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9772ea9e-f7c8-472e-b392-24b4ef41f2cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1767.611195] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb1ecd55-684a-4dc8-ab68-1904553f80dd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.619811] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ed72a2a-f19f-4c6e-a60b-bf26d42dbe94 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.651344] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1517cfd-b10d-4dc2-89ad-ae67d36bbb61 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.659894] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2599f13e-fb4e-4183-a634-db2ea2da4f7f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.674830] env[63379]: DEBUG nova.compute.provider_tree [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1768.017429] env[63379]: DEBUG nova.network.neutron [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 
tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Updating instance_info_cache with network_info: [{"id": "82d4cc07-9772-4f7e-87ba-1ef653e88fd3", "address": "fa:16:3e:63:27:ae", "network": {"id": "a2c9b802-041e-4679-bfb1-118fd9cd10f3", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-986609966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f28f4532d464e6eb90ab75799990c85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82d4cc07-97", "ovs_interfaceid": "82d4cc07-9772-4f7e-87ba-1ef653e88fd3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1768.178571] env[63379]: DEBUG nova.scheduler.client.report [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1768.522774] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Releasing lock "refresh_cache-eda684fa-1595-4985-beb7-c298049411bf" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1768.687064] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.382s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1768.687064] env[63379]: DEBUG nova.compute.manager [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1768.689176] env[63379]: DEBUG oslo_concurrency.lockutils [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.363s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1768.690657] env[63379]: INFO nova.compute.claims [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1769.055942] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8532bc1-8b10-460f-98ef-cfc973556d87 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.083926] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f31fefbe-031e-4b5c-8e8b-3f45bca4e775 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.092569] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Updating instance 'eda684fa-1595-4985-beb7-c298049411bf' progress to 83 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1769.170952] env[63379]: DEBUG oslo_concurrency.lockutils [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Acquiring lock "cb62192b-63db-40d0-97bb-1df171ade64b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1769.171213] env[63379]: DEBUG oslo_concurrency.lockutils [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Lock "cb62192b-63db-40d0-97bb-1df171ade64b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1769.195799] env[63379]: DEBUG nova.compute.utils [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1769.199208] env[63379]: DEBUG nova.compute.manager [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1769.199391] env[63379]: DEBUG nova.network.neutron [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1769.255194] env[63379]: DEBUG nova.policy [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '991a93509b8943a693859488a56352b3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '928a9d102f0e45b897eae72fa566c0fe', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1769.513429] env[63379]: DEBUG nova.network.neutron [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Successfully created port: 1e94cb02-aa72-4461-85e0-63ff98d54c0e {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1769.600112] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1769.600436] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e595ef55-4e41-46d2-9ca3-ded90e5d23f5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.608285] env[63379]: DEBUG oslo_vmware.api [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1769.608285] env[63379]: value = "task-1780003" [ 1769.608285] env[63379]: _type = "Task" [ 1769.608285] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1769.617307] env[63379]: DEBUG oslo_vmware.api [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780003, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1769.673030] env[63379]: DEBUG nova.compute.manager [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Starting instance... 
{{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1769.700712] env[63379]: DEBUG nova.compute.manager [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1769.956876] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6791992-4939-4796-9fa0-d81899955b15 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.965486] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-140e3bc9-26da-4b9f-9a33-bf318816a51e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.997720] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8ceee44-391d-46d2-b159-d79b091e9dd1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.007640] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ca7366e-90ca-48a5-8a97-972aa31cd6cb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.023272] env[63379]: DEBUG nova.compute.provider_tree [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1770.120545] env[63379]: DEBUG oslo_vmware.api [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780003, 'name': PowerOnVM_Task, 'duration_secs': 0.433812} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1770.120545] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1770.120545] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0516886d-eef1-446c-9be5-4e711425391a tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Updating instance 'eda684fa-1595-4985-beb7-c298049411bf' progress to 100 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1770.195610] env[63379]: DEBUG oslo_concurrency.lockutils [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1770.526573] env[63379]: DEBUG nova.scheduler.client.report [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1770.712767] env[63379]: DEBUG nova.compute.manager [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1770.740033] env[63379]: DEBUG nova.virt.hardware [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1770.740033] env[63379]: DEBUG nova.virt.hardware [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1770.740033] env[63379]: DEBUG nova.virt.hardware [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1770.740275] env[63379]: DEBUG nova.virt.hardware [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1770.740469] env[63379]: DEBUG nova.virt.hardware [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1770.740642] env[63379]: DEBUG nova.virt.hardware [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1770.740859] env[63379]: DEBUG nova.virt.hardware [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1770.741045] env[63379]: DEBUG nova.virt.hardware [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1770.741223] env[63379]: DEBUG nova.virt.hardware [None 
req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1770.741389] env[63379]: DEBUG nova.virt.hardware [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1770.741569] env[63379]: DEBUG nova.virt.hardware [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1770.742486] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c44502b9-f1b9-48d7-b508-11ec0beb5066 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.751109] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4b9e094-b5bd-427e-8f6c-94c4b2269e86 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.948396] env[63379]: DEBUG nova.compute.manager [req-200d417f-9a21-4f24-8736-a86a9277317f req-0856d8bb-e2dc-4c8c-a87d-afda7033d624 service nova] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Received event network-vif-plugged-1e94cb02-aa72-4461-85e0-63ff98d54c0e {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1770.948769] env[63379]: DEBUG oslo_concurrency.lockutils [req-200d417f-9a21-4f24-8736-a86a9277317f req-0856d8bb-e2dc-4c8c-a87d-afda7033d624 service nova] Acquiring lock "0f248290-a14c-4c76-98b3-4efa5bda5f05-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1770.949075] env[63379]: DEBUG oslo_concurrency.lockutils [req-200d417f-9a21-4f24-8736-a86a9277317f req-0856d8bb-e2dc-4c8c-a87d-afda7033d624 service nova] Lock "0f248290-a14c-4c76-98b3-4efa5bda5f05-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1770.949121] env[63379]: DEBUG oslo_concurrency.lockutils [req-200d417f-9a21-4f24-8736-a86a9277317f req-0856d8bb-e2dc-4c8c-a87d-afda7033d624 service nova] Lock "0f248290-a14c-4c76-98b3-4efa5bda5f05-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1770.949271] env[63379]: DEBUG nova.compute.manager [req-200d417f-9a21-4f24-8736-a86a9277317f req-0856d8bb-e2dc-4c8c-a87d-afda7033d624 service nova] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] No waiting events found dispatching network-vif-plugged-1e94cb02-aa72-4461-85e0-63ff98d54c0e {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1770.949444] env[63379]: WARNING nova.compute.manager 
[req-200d417f-9a21-4f24-8736-a86a9277317f req-0856d8bb-e2dc-4c8c-a87d-afda7033d624 service nova] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Received unexpected event network-vif-plugged-1e94cb02-aa72-4461-85e0-63ff98d54c0e for instance with vm_state building and task_state spawning. [ 1771.031717] env[63379]: DEBUG oslo_concurrency.lockutils [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.342s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1771.032272] env[63379]: DEBUG nova.compute.manager [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1771.036028] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cadf2f1c-38b3-40af-93bb-2fab0eafa3f3 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.384s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1771.036380] env[63379]: DEBUG nova.objects.instance [None req-cadf2f1c-38b3-40af-93bb-2fab0eafa3f3 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Lazy-loading 'resources' on Instance uuid 8877e0f7-091b-4a91-bb5c-fb7733e5f70c {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1771.106134] env[63379]: DEBUG nova.network.neutron [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Successfully updated port: 1e94cb02-aa72-4461-85e0-63ff98d54c0e {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1771.540497] env[63379]: DEBUG nova.compute.utils [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1771.544484] env[63379]: DEBUG nova.compute.manager [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1771.544692] env[63379]: DEBUG nova.network.neutron [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1771.581650] env[63379]: DEBUG nova.policy [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6002d60826e740f0a8c7be3db1fcc8d2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c1356055b3b84b099dbb7fe5716c1c30', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1771.608499] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "refresh_cache-0f248290-a14c-4c76-98b3-4efa5bda5f05" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1771.608652] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquired lock "refresh_cache-0f248290-a14c-4c76-98b3-4efa5bda5f05" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1771.608795] env[63379]: DEBUG nova.network.neutron [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1771.769883] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a52c6500-d4b1-455e-b555-e206da3c7b75 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.777185] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe753a8a-eb8d-4c1d-b7ce-b2c51fb32d1d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.807189] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0e43a4b-266d-4e1a-bf86-1a4c1492e88b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.814779] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba4b1964-5186-478e-97ab-c6aab42fd406 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.828488] env[63379]: DEBUG nova.compute.provider_tree [None 
req-cadf2f1c-38b3-40af-93bb-2fab0eafa3f3 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1771.857318] env[63379]: DEBUG nova.network.neutron [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Successfully created port: 7de015d0-7fb7-43e2-87cb-c9cafb6e4e26 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1771.881242] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b65f79af-290c-48f0-aaa7-8467ee951d21 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "eda684fa-1595-4985-beb7-c298049411bf" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1771.881504] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b65f79af-290c-48f0-aaa7-8467ee951d21 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "eda684fa-1595-4985-beb7-c298049411bf" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1771.881693] env[63379]: DEBUG nova.compute.manager [None req-b65f79af-290c-48f0-aaa7-8467ee951d21 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Going to confirm migration 3 {{(pid=63379) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 1772.045819] env[63379]: DEBUG nova.compute.manager [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1772.143246] env[63379]: DEBUG nova.network.neutron [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Instance cache missing network info. 
{{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1772.275362] env[63379]: DEBUG nova.network.neutron [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Updating instance_info_cache with network_info: [{"id": "1e94cb02-aa72-4461-85e0-63ff98d54c0e", "address": "fa:16:3e:69:a0:ca", "network": {"id": "f43cdd88-dc3a-4cc6-af5d-da244f472d78", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-715557899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "928a9d102f0e45b897eae72fa566c0fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23fc30ea-1f06-424d-86e1-27ae5435b1a9", "external-id": "nsx-vlan-transportzone-189", "segmentation_id": 189, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e94cb02-aa", "ovs_interfaceid": "1e94cb02-aa72-4461-85e0-63ff98d54c0e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1772.332019] env[63379]: DEBUG nova.scheduler.client.report [None req-cadf2f1c-38b3-40af-93bb-2fab0eafa3f3 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1772.444957] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b65f79af-290c-48f0-aaa7-8467ee951d21 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "refresh_cache-eda684fa-1595-4985-beb7-c298049411bf" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1772.445238] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b65f79af-290c-48f0-aaa7-8467ee951d21 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquired lock "refresh_cache-eda684fa-1595-4985-beb7-c298049411bf" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1772.445367] env[63379]: DEBUG nova.network.neutron [None req-b65f79af-290c-48f0-aaa7-8467ee951d21 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Building network info cache for instance {{(pid=63379) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 1772.445562] env[63379]: DEBUG nova.objects.instance [None req-b65f79af-290c-48f0-aaa7-8467ee951d21 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lazy-loading 'info_cache' on Instance uuid eda684fa-1595-4985-beb7-c298049411bf {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1772.777800] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Releasing lock "refresh_cache-0f248290-a14c-4c76-98b3-4efa5bda5f05" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1772.778189] env[63379]: DEBUG nova.compute.manager [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Instance network_info: |[{"id": "1e94cb02-aa72-4461-85e0-63ff98d54c0e", "address": "fa:16:3e:69:a0:ca", "network": {"id": "f43cdd88-dc3a-4cc6-af5d-da244f472d78", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-715557899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "928a9d102f0e45b897eae72fa566c0fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23fc30ea-1f06-424d-86e1-27ae5435b1a9", "external-id": "nsx-vlan-transportzone-189", "segmentation_id": 189, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e94cb02-aa", "ovs_interfaceid": "1e94cb02-aa72-4461-85e0-63ff98d54c0e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1772.778636] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:69:a0:ca', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '23fc30ea-1f06-424d-86e1-27ae5435b1a9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1e94cb02-aa72-4461-85e0-63ff98d54c0e', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1772.786355] env[63379]: DEBUG oslo.service.loopingcall [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
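The "Instance VIF info" record just above is a reduced view of one element of the network_info list logged for instance 0f248290-a14c-4c76-98b3-4efa5bda5f05 a few entries earlier. A minimal sketch of that mapping, assuming only the field names visible in the log; the helper itself is hypothetical, not vmops code.

```python
# Hypothetical mapping from one logged network_info element to the
# "Instance VIF info" shape shown above; field names are taken from the log.
def vif_info_from_network_info(vif: dict) -> dict:
    details = vif["details"]
    return {
        "network_name": vif["network"]["bridge"],   # 'br-int' in the log
        "mac_address": vif["address"],              # 'fa:16:3e:69:a0:ca'
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": details["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],
        # Fixed here for illustration; in the real flow the model comes from
        # image/flavor metadata (the image name above even says 'vmxnet3').
        "vif_model": "vmxnet3",
    }
```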
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1772.786588] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1772.786889] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-afaf5208-016d-4217-8ac2-d76584a2deeb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.806128] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1772.806128] env[63379]: value = "task-1780004" [ 1772.806128] env[63379]: _type = "Task" [ 1772.806128] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1772.813606] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780004, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1772.837718] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cadf2f1c-38b3-40af-93bb-2fab0eafa3f3 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.802s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1772.840044] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.076s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1772.841555] env[63379]: INFO nova.compute.claims [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1772.860123] env[63379]: INFO nova.scheduler.client.report [None req-cadf2f1c-38b3-40af-93bb-2fab0eafa3f3 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Deleted allocations for instance 8877e0f7-091b-4a91-bb5c-fb7733e5f70c [ 1772.972952] env[63379]: DEBUG nova.compute.manager [req-924b3d1f-1d82-4bf4-99aa-ff27be2ad5c8 req-b409d912-87d2-4173-b039-24c9071993e4 service nova] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Received event network-changed-1e94cb02-aa72-4461-85e0-63ff98d54c0e {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1772.973283] env[63379]: DEBUG nova.compute.manager [req-924b3d1f-1d82-4bf4-99aa-ff27be2ad5c8 req-b409d912-87d2-4173-b039-24c9071993e4 service nova] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Refreshing instance network info cache due to event network-changed-1e94cb02-aa72-4461-85e0-63ff98d54c0e. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1772.973534] env[63379]: DEBUG oslo_concurrency.lockutils [req-924b3d1f-1d82-4bf4-99aa-ff27be2ad5c8 req-b409d912-87d2-4173-b039-24c9071993e4 service nova] Acquiring lock "refresh_cache-0f248290-a14c-4c76-98b3-4efa5bda5f05" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1772.973689] env[63379]: DEBUG oslo_concurrency.lockutils [req-924b3d1f-1d82-4bf4-99aa-ff27be2ad5c8 req-b409d912-87d2-4173-b039-24c9071993e4 service nova] Acquired lock "refresh_cache-0f248290-a14c-4c76-98b3-4efa5bda5f05" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1772.973887] env[63379]: DEBUG nova.network.neutron [req-924b3d1f-1d82-4bf4-99aa-ff27be2ad5c8 req-b409d912-87d2-4173-b039-24c9071993e4 service nova] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Refreshing network info cache for port 1e94cb02-aa72-4461-85e0-63ff98d54c0e {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1773.055033] env[63379]: DEBUG nova.compute.manager [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1773.079364] env[63379]: DEBUG nova.virt.hardware [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1773.079635] env[63379]: DEBUG nova.virt.hardware [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1773.079796] env[63379]: DEBUG nova.virt.hardware [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1773.079988] env[63379]: DEBUG nova.virt.hardware [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Flavor pref 0:0:0 {{(pid=63379) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1773.080157] env[63379]: DEBUG nova.virt.hardware [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1773.080311] env[63379]: DEBUG nova.virt.hardware [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1773.080525] env[63379]: DEBUG nova.virt.hardware [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1773.080693] env[63379]: DEBUG nova.virt.hardware [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1773.080866] env[63379]: DEBUG nova.virt.hardware [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1773.081045] env[63379]: DEBUG nova.virt.hardware [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1773.081227] env[63379]: DEBUG nova.virt.hardware [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1773.082099] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-284e99f5-cf8f-49a3-aac9-0407adfc93ec {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.092388] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b6ee90d-bcde-4799-b1eb-9293b14f5e43 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.316577] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780004, 'name': CreateVM_Task} progress is 25%. 
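The nova.virt.hardware trace repeated above (once per m1.nano build) always narrows down to a single candidate topology. A simplified sketch, under the assumption that candidates are just the factorisations of the vCPU count within the logged sockets/cores/threads maxima of 65536; this condenses the idea and is not a copy of nova/virt/hardware.py.

```python
# Simplified illustration of the topology search traced above: with no
# flavor/image constraints the maxima default to 65536, and a 1-vCPU guest
# has exactly one factorisation, (sockets=1, cores=1, threads=1).
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Yield (sockets, cores, threads) triples whose product equals vcpus."""
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        rest = vcpus // sockets
        for cores in range(1, min(rest, max_cores) + 1):
            if rest % cores:
                continue
            threads = rest // cores
            if threads <= max_threads:
                yield sockets, cores, threads

print(list(possible_topologies(1)))   # [(1, 1, 1)] -- matches the log
print(list(possible_topologies(4)))   # several candidates for a 4-vCPU flavor
```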
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1773.331678] env[63379]: DEBUG nova.network.neutron [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Successfully updated port: 7de015d0-7fb7-43e2-87cb-c9cafb6e4e26 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1773.367419] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cadf2f1c-38b3-40af-93bb-2fab0eafa3f3 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Lock "8877e0f7-091b-4a91-bb5c-fb7733e5f70c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.941s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1773.817456] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780004, 'name': CreateVM_Task, 'duration_secs': 0.719789} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1773.817805] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1773.818400] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1773.818576] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1773.818905] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1773.819191] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d9dd178-aa5f-4d55-a62b-421e93ee4931 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.823989] env[63379]: DEBUG oslo_vmware.api [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1773.823989] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]528e3726-2142-e994-c929-45e5b220bf85" [ 1773.823989] env[63379]: _type = "Task" [ 1773.823989] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1773.824886] env[63379]: DEBUG nova.network.neutron [None req-b65f79af-290c-48f0-aaa7-8467ee951d21 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Updating instance_info_cache with network_info: [{"id": "82d4cc07-9772-4f7e-87ba-1ef653e88fd3", "address": "fa:16:3e:63:27:ae", "network": {"id": "a2c9b802-041e-4679-bfb1-118fd9cd10f3", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-986609966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f28f4532d464e6eb90ab75799990c85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82d4cc07-97", "ovs_interfaceid": "82d4cc07-9772-4f7e-87ba-1ef653e88fd3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1773.834156] env[63379]: DEBUG oslo_vmware.api [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]528e3726-2142-e994-c929-45e5b220bf85, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1773.836909] env[63379]: DEBUG oslo_concurrency.lockutils [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Acquiring lock "refresh_cache-2254844f-b1f9-435e-ac8a-f114f05331e1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1773.837010] env[63379]: DEBUG oslo_concurrency.lockutils [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Acquired lock "refresh_cache-2254844f-b1f9-435e-ac8a-f114f05331e1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1773.837192] env[63379]: DEBUG nova.network.neutron [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1773.855605] env[63379]: DEBUG nova.network.neutron [req-924b3d1f-1d82-4bf4-99aa-ff27be2ad5c8 req-b409d912-87d2-4173-b039-24c9071993e4 service nova] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Updated VIF entry in instance network info cache for port 1e94cb02-aa72-4461-85e0-63ff98d54c0e. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1773.855986] env[63379]: DEBUG nova.network.neutron [req-924b3d1f-1d82-4bf4-99aa-ff27be2ad5c8 req-b409d912-87d2-4173-b039-24c9071993e4 service nova] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Updating instance_info_cache with network_info: [{"id": "1e94cb02-aa72-4461-85e0-63ff98d54c0e", "address": "fa:16:3e:69:a0:ca", "network": {"id": "f43cdd88-dc3a-4cc6-af5d-da244f472d78", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-715557899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "928a9d102f0e45b897eae72fa566c0fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23fc30ea-1f06-424d-86e1-27ae5435b1a9", "external-id": "nsx-vlan-transportzone-189", "segmentation_id": 189, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e94cb02-aa", "ovs_interfaceid": "1e94cb02-aa72-4461-85e0-63ff98d54c0e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1774.081227] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25acf951-acf4-43d6-b246-03cf60f1c7ce {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.090858] env[63379]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14e521da-0125-4ff3-a6d0-10eed054a1a3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.123410] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1eb9f7e-19a2-4f31-8aa3-779d5e617368 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.132047] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c64c5f49-b7e6-4cbe-be34-fef2a3d7d8c9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.146136] env[63379]: DEBUG nova.compute.provider_tree [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1774.331265] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b65f79af-290c-48f0-aaa7-8467ee951d21 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Releasing lock "refresh_cache-eda684fa-1595-4985-beb7-c298049411bf" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1774.331555] env[63379]: DEBUG nova.objects.instance [None req-b65f79af-290c-48f0-aaa7-8467ee951d21 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lazy-loading 'migration_context' on Instance uuid eda684fa-1595-4985-beb7-c298049411bf {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1774.338649] env[63379]: DEBUG oslo_vmware.api [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]528e3726-2142-e994-c929-45e5b220bf85, 'name': SearchDatastore_Task, 'duration_secs': 0.0104} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1774.339452] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1774.339681] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1774.340973] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1774.340973] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1774.340973] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1774.342297] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4f951a76-292a-4941-adac-0ba9ac770b1d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.351147] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1774.351336] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Folder [datastore1] devstack-image-cache_base created. 
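The lock names in the records above (the datastore image-cache path, its .vmdk variant, and the accompanying "external semaphore") point at one named lock per cached image artefact, so concurrent spawns of the same image serialise while different images proceed independently. A hedged sketch of that pattern with plain threading primitives; the locks in the log come from oslo.concurrency lockutils, which also logs wait/held times and supports cross-process external locks.

```python
import threading
from contextlib import contextmanager

# One lock object per name, created on first use; a module-level guard keeps
# the registry itself thread-safe. Illustrative only.
_registry_guard = threading.Lock()
_locks: dict[str, threading.Lock] = {}

@contextmanager
def named_lock(name: str):
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    lock.acquire()
    print(f'Lock "{name}" acquired')
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" released')

# Usage mirroring the cache-entry name seen in the log:
cache_key = "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48"
with named_lock(cache_key):
    pass  # fetch-or-reuse the cached image while holding the entry lock
```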
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1774.352053] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1955ac25-7f90-44f2-8027-6bf0b374f6f1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.357082] env[63379]: DEBUG oslo_vmware.api [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1774.357082] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a465a6-86ef-3a5b-0544-930574ca65ac" [ 1774.357082] env[63379]: _type = "Task" [ 1774.357082] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1774.360780] env[63379]: DEBUG oslo_concurrency.lockutils [req-924b3d1f-1d82-4bf4-99aa-ff27be2ad5c8 req-b409d912-87d2-4173-b039-24c9071993e4 service nova] Releasing lock "refresh_cache-0f248290-a14c-4c76-98b3-4efa5bda5f05" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1774.365606] env[63379]: DEBUG oslo_vmware.api [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a465a6-86ef-3a5b-0544-930574ca65ac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.371062] env[63379]: DEBUG nova.network.neutron [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Instance cache missing network info. 
{{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1774.492574] env[63379]: DEBUG nova.network.neutron [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Updating instance_info_cache with network_info: [{"id": "7de015d0-7fb7-43e2-87cb-c9cafb6e4e26", "address": "fa:16:3e:ae:80:ad", "network": {"id": "bd6b222f-012e-4022-8e8c-0a0cb9f094d5", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1745271163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c1356055b3b84b099dbb7fe5716c1c30", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "93c5b7ce-4c84-40bc-884c-b2453e0eee69", "external-id": "nsx-vlan-transportzone-882", "segmentation_id": 882, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7de015d0-7f", "ovs_interfaceid": "7de015d0-7fb7-43e2-87cb-c9cafb6e4e26", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1774.515237] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Acquiring lock "b9bc2562-9475-400e-9cf9-646b8f4c8cf2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1774.515502] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Lock "b9bc2562-9475-400e-9cf9-646b8f4c8cf2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1774.515713] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Acquiring lock "b9bc2562-9475-400e-9cf9-646b8f4c8cf2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1774.515901] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Lock "b9bc2562-9475-400e-9cf9-646b8f4c8cf2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1774.516084] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Lock "b9bc2562-9475-400e-9cf9-646b8f4c8cf2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1774.518048] env[63379]: INFO nova.compute.manager [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Terminating instance [ 1774.519681] env[63379]: DEBUG nova.compute.manager [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1774.519854] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1774.520699] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f91da83-a572-4eee-af4d-09320f355dfb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.528901] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1774.529137] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d6df70c4-8e33-48e9-b200-6d69d89150d2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.535398] env[63379]: DEBUG oslo_vmware.api [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Waiting for the task: (returnval){ [ 1774.535398] env[63379]: value = "task-1780005" [ 1774.535398] env[63379]: _type = "Task" [ 1774.535398] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1774.542982] env[63379]: DEBUG oslo_vmware.api [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1780005, 'name': PowerOffVM_Task} progress is 0%. 
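Between the CreateVM_Task entries earlier and the PowerOffVM_Task entries just above, the shape is the same: invoke a vCenter task, poll its progress ("progress is 0%", "25%", "completed successfully"), then continue. A minimal loop in that shape, assuming a caller-supplied poll_progress callable; this illustrates the pattern only and is not the oslo.vmware API.

```python
import time

def wait_for_task(poll_progress, interval=0.5, timeout=300.0):
    """Poll until the task reports success.

    poll_progress() is assumed to return (state, percent), where state is
    'running', 'success' or 'error'. Hypothetical shape for illustration.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, percent = poll_progress()
        print(f"progress is {percent}%")
        if state == "success":
            return
        if state == "error":
            raise RuntimeError("task reported an error")
        time.sleep(interval)
    raise TimeoutError("task did not complete within the timeout")
```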
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.649624] env[63379]: DEBUG nova.scheduler.client.report [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1774.834742] env[63379]: DEBUG nova.objects.base [None req-b65f79af-290c-48f0-aaa7-8467ee951d21 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=63379) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1774.835781] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7443d43-1918-43de-8a70-31edb240ca83 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.855022] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a3b11ab-c182-4f17-819f-4e8991f0a1be {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.861458] env[63379]: DEBUG oslo_vmware.api [None req-b65f79af-290c-48f0-aaa7-8467ee951d21 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1774.861458] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f886ee-c04d-0f85-8ac9-70580e7a0fb4" [ 1774.861458] env[63379]: _type = "Task" [ 1774.861458] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1774.868083] env[63379]: DEBUG oslo_vmware.api [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a465a6-86ef-3a5b-0544-930574ca65ac, 'name': SearchDatastore_Task, 'duration_secs': 0.008676} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1774.869079] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f88717e-8bab-4fd2-8deb-9eef00e8a6d5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.874052] env[63379]: DEBUG oslo_vmware.api [None req-b65f79af-290c-48f0-aaa7-8467ee951d21 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f886ee-c04d-0f85-8ac9-70580e7a0fb4, 'name': SearchDatastore_Task, 'duration_secs': 0.006899} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1774.874607] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b65f79af-290c-48f0-aaa7-8467ee951d21 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1774.877292] env[63379]: DEBUG oslo_vmware.api [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1774.877292] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5268866f-970a-7206-1ef7-953ca3d8779c" [ 1774.877292] env[63379]: _type = "Task" [ 1774.877292] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1774.884095] env[63379]: DEBUG oslo_vmware.api [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5268866f-970a-7206-1ef7-953ca3d8779c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.995703] env[63379]: DEBUG oslo_concurrency.lockutils [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Releasing lock "refresh_cache-2254844f-b1f9-435e-ac8a-f114f05331e1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1774.996077] env[63379]: DEBUG nova.compute.manager [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Instance network_info: |[{"id": "7de015d0-7fb7-43e2-87cb-c9cafb6e4e26", "address": "fa:16:3e:ae:80:ad", "network": {"id": "bd6b222f-012e-4022-8e8c-0a0cb9f094d5", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1745271163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c1356055b3b84b099dbb7fe5716c1c30", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "93c5b7ce-4c84-40bc-884c-b2453e0eee69", "external-id": "nsx-vlan-transportzone-882", "segmentation_id": 882, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7de015d0-7f", "ovs_interfaceid": "7de015d0-7fb7-43e2-87cb-c9cafb6e4e26", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1774.997301] env[63379]: DEBUG nova.compute.manager 
[req-85c4d8bd-d6bb-4465-8713-2ef31d33ad3d req-f7048c5d-10ea-489f-bcbf-8ad84101aab1 service nova] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Received event network-vif-plugged-7de015d0-7fb7-43e2-87cb-c9cafb6e4e26 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1774.997512] env[63379]: DEBUG oslo_concurrency.lockutils [req-85c4d8bd-d6bb-4465-8713-2ef31d33ad3d req-f7048c5d-10ea-489f-bcbf-8ad84101aab1 service nova] Acquiring lock "2254844f-b1f9-435e-ac8a-f114f05331e1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1774.997733] env[63379]: DEBUG oslo_concurrency.lockutils [req-85c4d8bd-d6bb-4465-8713-2ef31d33ad3d req-f7048c5d-10ea-489f-bcbf-8ad84101aab1 service nova] Lock "2254844f-b1f9-435e-ac8a-f114f05331e1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1774.997932] env[63379]: DEBUG oslo_concurrency.lockutils [req-85c4d8bd-d6bb-4465-8713-2ef31d33ad3d req-f7048c5d-10ea-489f-bcbf-8ad84101aab1 service nova] Lock "2254844f-b1f9-435e-ac8a-f114f05331e1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1774.998134] env[63379]: DEBUG nova.compute.manager [req-85c4d8bd-d6bb-4465-8713-2ef31d33ad3d req-f7048c5d-10ea-489f-bcbf-8ad84101aab1 service nova] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] No waiting events found dispatching network-vif-plugged-7de015d0-7fb7-43e2-87cb-c9cafb6e4e26 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1774.998308] env[63379]: WARNING nova.compute.manager [req-85c4d8bd-d6bb-4465-8713-2ef31d33ad3d req-f7048c5d-10ea-489f-bcbf-8ad84101aab1 service nova] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Received unexpected event network-vif-plugged-7de015d0-7fb7-43e2-87cb-c9cafb6e4e26 for instance with vm_state building and task_state spawning. [ 1774.998474] env[63379]: DEBUG nova.compute.manager [req-85c4d8bd-d6bb-4465-8713-2ef31d33ad3d req-f7048c5d-10ea-489f-bcbf-8ad84101aab1 service nova] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Received event network-changed-7de015d0-7fb7-43e2-87cb-c9cafb6e4e26 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1774.998629] env[63379]: DEBUG nova.compute.manager [req-85c4d8bd-d6bb-4465-8713-2ef31d33ad3d req-f7048c5d-10ea-489f-bcbf-8ad84101aab1 service nova] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Refreshing instance network info cache due to event network-changed-7de015d0-7fb7-43e2-87cb-c9cafb6e4e26. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1774.998824] env[63379]: DEBUG oslo_concurrency.lockutils [req-85c4d8bd-d6bb-4465-8713-2ef31d33ad3d req-f7048c5d-10ea-489f-bcbf-8ad84101aab1 service nova] Acquiring lock "refresh_cache-2254844f-b1f9-435e-ac8a-f114f05331e1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1774.998964] env[63379]: DEBUG oslo_concurrency.lockutils [req-85c4d8bd-d6bb-4465-8713-2ef31d33ad3d req-f7048c5d-10ea-489f-bcbf-8ad84101aab1 service nova] Acquired lock "refresh_cache-2254844f-b1f9-435e-ac8a-f114f05331e1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1774.999135] env[63379]: DEBUG nova.network.neutron [req-85c4d8bd-d6bb-4465-8713-2ef31d33ad3d req-f7048c5d-10ea-489f-bcbf-8ad84101aab1 service nova] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Refreshing network info cache for port 7de015d0-7fb7-43e2-87cb-c9cafb6e4e26 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1775.000330] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ae:80:ad', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '93c5b7ce-4c84-40bc-884c-b2453e0eee69', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7de015d0-7fb7-43e2-87cb-c9cafb6e4e26', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1775.007970] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Creating folder: Project (c1356055b3b84b099dbb7fe5716c1c30). Parent ref: group-v369214. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1775.010787] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1bbd7cd5-2c90-4a69-90de-a693e5784806 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.022349] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Created folder: Project (c1356055b3b84b099dbb7fe5716c1c30) in parent group-v369214. [ 1775.022534] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Creating folder: Instances. Parent ref: group-v369463. 
{{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1775.022749] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ffc2bf9d-193d-4f57-b5bd-a3878452125e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.031830] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Created folder: Instances in parent group-v369463. [ 1775.032066] env[63379]: DEBUG oslo.service.loopingcall [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1775.032248] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1775.032441] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f918ed38-7c74-43c3-9f1c-bff44fa094c3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.056376] env[63379]: DEBUG oslo_vmware.api [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1780005, 'name': PowerOffVM_Task, 'duration_secs': 0.242359} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1775.057678] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1775.057678] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1775.057923] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1775.057923] env[63379]: value = "task-1780008" [ 1775.057923] env[63379]: _type = "Task" [ 1775.057923] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1775.058093] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9bf22f79-b9c2-485a-9a34-99488fc6505a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.066814] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780008, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.138112] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1775.138385] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1775.138543] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Deleting the datastore file [datastore1] b9bc2562-9475-400e-9cf9-646b8f4c8cf2 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1775.138825] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d98d5ee9-d778-4382-9526-7e068117527e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.144816] env[63379]: DEBUG oslo_vmware.api [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Waiting for the task: (returnval){ [ 1775.144816] env[63379]: value = "task-1780010" [ 1775.144816] env[63379]: _type = "Task" [ 1775.144816] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1775.154422] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.314s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1775.155035] env[63379]: DEBUG nova.compute.manager [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1775.157790] env[63379]: DEBUG oslo_vmware.api [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1780010, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.160670] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9772ea9e-f7c8-472e-b392-24b4ef41f2cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.565s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1775.160927] env[63379]: DEBUG nova.objects.instance [None req-9772ea9e-f7c8-472e-b392-24b4ef41f2cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lazy-loading 'resources' on Instance uuid f983d089-7cfc-46a5-8f8d-f49f67aef1da {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1775.262077] env[63379]: DEBUG nova.network.neutron [req-85c4d8bd-d6bb-4465-8713-2ef31d33ad3d req-f7048c5d-10ea-489f-bcbf-8ad84101aab1 service nova] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Updated VIF entry in instance network info cache for port 7de015d0-7fb7-43e2-87cb-c9cafb6e4e26. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1775.262524] env[63379]: DEBUG nova.network.neutron [req-85c4d8bd-d6bb-4465-8713-2ef31d33ad3d req-f7048c5d-10ea-489f-bcbf-8ad84101aab1 service nova] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Updating instance_info_cache with network_info: [{"id": "7de015d0-7fb7-43e2-87cb-c9cafb6e4e26", "address": "fa:16:3e:ae:80:ad", "network": {"id": "bd6b222f-012e-4022-8e8c-0a0cb9f094d5", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1745271163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c1356055b3b84b099dbb7fe5716c1c30", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "93c5b7ce-4c84-40bc-884c-b2453e0eee69", "external-id": "nsx-vlan-transportzone-882", "segmentation_id": 882, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7de015d0-7f", "ovs_interfaceid": "7de015d0-7fb7-43e2-87cb-c9cafb6e4e26", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1775.388221] env[63379]: DEBUG oslo_vmware.api [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5268866f-970a-7206-1ef7-953ca3d8779c, 'name': SearchDatastore_Task, 'duration_secs': 0.00885} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1775.388496] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1775.388757] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 0f248290-a14c-4c76-98b3-4efa5bda5f05/0f248290-a14c-4c76-98b3-4efa5bda5f05.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1775.389025] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-726236a1-ef79-4c92-8ed1-ea2775a7416b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.396027] env[63379]: DEBUG oslo_vmware.api [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1775.396027] env[63379]: value = "task-1780011" [ 1775.396027] env[63379]: _type = "Task" [ 1775.396027] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1775.402882] env[63379]: DEBUG oslo_vmware.api [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780011, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.571340] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780008, 'name': CreateVM_Task, 'duration_secs': 0.333683} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1775.571625] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1775.572544] env[63379]: DEBUG oslo_concurrency.lockutils [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1775.572833] env[63379]: DEBUG oslo_concurrency.lockutils [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1775.573187] env[63379]: DEBUG oslo_concurrency.lockutils [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1775.573519] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e706e7bc-fc90-4d65-92f7-adca656ef7d7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.581738] env[63379]: DEBUG oslo_vmware.api [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Waiting for the task: (returnval){ [ 1775.581738] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]528afc53-40eb-7f1a-7914-5569c79683a4" [ 1775.581738] env[63379]: _type = "Task" [ 1775.581738] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1775.594457] env[63379]: DEBUG oslo_vmware.api [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]528afc53-40eb-7f1a-7914-5569c79683a4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.654844] env[63379]: DEBUG oslo_vmware.api [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1780010, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134253} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1775.655151] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1775.655343] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1775.655528] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1775.655710] env[63379]: INFO nova.compute.manager [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1775.655969] env[63379]: DEBUG oslo.service.loopingcall [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1775.656197] env[63379]: DEBUG nova.compute.manager [-] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1775.656284] env[63379]: DEBUG nova.network.neutron [-] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1775.663557] env[63379]: DEBUG nova.compute.utils [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1775.667459] env[63379]: DEBUG nova.compute.manager [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1775.667635] env[63379]: DEBUG nova.network.neutron [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1775.743743] env[63379]: DEBUG nova.policy [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '756ff556130a4855b461899fece1e1fa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a3363a90de2d4d5988ddd03974c10d0a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1775.765204] env[63379]: DEBUG oslo_concurrency.lockutils [req-85c4d8bd-d6bb-4465-8713-2ef31d33ad3d req-f7048c5d-10ea-489f-bcbf-8ad84101aab1 service nova] Releasing lock "refresh_cache-2254844f-b1f9-435e-ac8a-f114f05331e1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1775.908557] env[63379]: DEBUG oslo_vmware.api [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780011, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.486046} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1775.909202] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 0f248290-a14c-4c76-98b3-4efa5bda5f05/0f248290-a14c-4c76-98b3-4efa5bda5f05.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1775.909202] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1775.909528] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-429d24ec-8b5d-4631-9abb-55d9906ff8c8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.916144] env[63379]: DEBUG oslo_vmware.api [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1775.916144] env[63379]: value = "task-1780012" [ 1775.916144] env[63379]: _type = "Task" [ 1775.916144] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1775.924611] env[63379]: DEBUG oslo_vmware.api [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780012, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.942022] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8eafc7d-17ba-46aa-802d-ae25d2d11a1e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.950879] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-058d0878-074b-4d84-85a7-81df05b87f42 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.984128] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba80c9ca-abe2-422f-b7de-f5c7d8c48c90 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.991847] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-952bab4f-1439-4caf-9632-aa63bbec2491 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.006396] env[63379]: DEBUG nova.compute.provider_tree [None req-9772ea9e-f7c8-472e-b392-24b4ef41f2cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1776.093875] env[63379]: DEBUG oslo_vmware.api [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]528afc53-40eb-7f1a-7914-5569c79683a4, 'name': SearchDatastore_Task, 'duration_secs': 0.066795} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1776.093875] env[63379]: DEBUG oslo_concurrency.lockutils [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1776.093983] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1776.094258] env[63379]: DEBUG oslo_concurrency.lockutils [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1776.094372] env[63379]: DEBUG oslo_concurrency.lockutils [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1776.094559] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1776.094833] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ccbf20af-84a8-40d3-b514-e90e4c852f16 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.103803] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1776.104042] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1776.106054] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9be52043-a8d1-46c5-8d63-4e7c63b4374c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.113267] env[63379]: DEBUG oslo_vmware.api [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Waiting for the task: (returnval){ [ 1776.113267] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c8c2e6-cf73-3d73-47e8-efb1569c613a" [ 1776.113267] env[63379]: _type = "Task" [ 1776.113267] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1776.124590] env[63379]: DEBUG oslo_vmware.api [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c8c2e6-cf73-3d73-47e8-efb1569c613a, 'name': SearchDatastore_Task, 'duration_secs': 0.009743} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1776.125454] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5506fe3-8406-44b4-8876-628545828ae7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.130714] env[63379]: DEBUG oslo_vmware.api [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Waiting for the task: (returnval){ [ 1776.130714] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52df75b0-7ede-8dcd-beb1-59ba5cf73801" [ 1776.130714] env[63379]: _type = "Task" [ 1776.130714] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1776.139812] env[63379]: DEBUG oslo_vmware.api [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52df75b0-7ede-8dcd-beb1-59ba5cf73801, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.171066] env[63379]: DEBUG nova.compute.manager [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Start building block device mappings for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1776.230222] env[63379]: DEBUG nova.network.neutron [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Successfully created port: ec9d8d4a-76e2-4945-ac09-1e225b358218 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1776.425949] env[63379]: DEBUG oslo_vmware.api [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780012, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066938} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1776.426245] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1776.427118] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54cd8ad9-cc39-4684-9c6e-342167f4391a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.449396] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] 0f248290-a14c-4c76-98b3-4efa5bda5f05/0f248290-a14c-4c76-98b3-4efa5bda5f05.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1776.449697] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-89ac635c-3365-4f35-a90d-17da82e6366b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.470421] env[63379]: DEBUG oslo_vmware.api [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1776.470421] env[63379]: value = "task-1780013" [ 1776.470421] env[63379]: _type = "Task" [ 1776.470421] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1776.478460] env[63379]: DEBUG oslo_vmware.api [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780013, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.513640] env[63379]: DEBUG nova.scheduler.client.report [None req-9772ea9e-f7c8-472e-b392-24b4ef41f2cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1776.609452] env[63379]: DEBUG nova.network.neutron [-] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1776.642218] env[63379]: DEBUG oslo_vmware.api [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52df75b0-7ede-8dcd-beb1-59ba5cf73801, 'name': SearchDatastore_Task, 'duration_secs': 0.009318} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1776.642496] env[63379]: DEBUG oslo_concurrency.lockutils [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1776.642800] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 2254844f-b1f9-435e-ac8a-f114f05331e1/2254844f-b1f9-435e-ac8a-f114f05331e1.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1776.643077] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-14573d4c-f063-4f45-bea5-79852c982513 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.650515] env[63379]: DEBUG oslo_vmware.api [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Waiting for the task: (returnval){ [ 1776.650515] env[63379]: value = "task-1780014" [ 1776.650515] env[63379]: _type = "Task" [ 1776.650515] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1776.658647] env[63379]: DEBUG oslo_vmware.api [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Task: {'id': task-1780014, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.984916] env[63379]: DEBUG oslo_vmware.api [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780013, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.019140] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9772ea9e-f7c8-472e-b392-24b4ef41f2cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.858s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1777.021815] env[63379]: DEBUG oslo_concurrency.lockutils [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.826s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1777.023567] env[63379]: INFO nova.compute.claims [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1777.047296] env[63379]: INFO nova.scheduler.client.report [None req-9772ea9e-f7c8-472e-b392-24b4ef41f2cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Deleted allocations for instance f983d089-7cfc-46a5-8f8d-f49f67aef1da [ 1777.103234] env[63379]: DEBUG nova.compute.manager [req-5a4ac206-15e5-47e0-823f-550c746a8e2d req-d6eb6bda-77aa-4ee9-977d-19a5e83e137f service nova] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Received event network-vif-deleted-04979812-3eaa-4eb6-9a6f-306c8544d56b {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1777.114023] env[63379]: INFO nova.compute.manager [-] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Took 1.46 seconds to deallocate network for instance. [ 1777.162836] env[63379]: DEBUG oslo_vmware.api [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Task: {'id': task-1780014, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.181092] env[63379]: DEBUG nova.compute.manager [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1777.210562] env[63379]: DEBUG nova.virt.hardware [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1777.210824] env[63379]: DEBUG nova.virt.hardware [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1777.210981] env[63379]: DEBUG nova.virt.hardware [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1777.211181] env[63379]: DEBUG nova.virt.hardware [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1777.211842] env[63379]: DEBUG nova.virt.hardware [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1777.211842] env[63379]: DEBUG nova.virt.hardware [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1777.211842] env[63379]: DEBUG nova.virt.hardware [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1777.211842] env[63379]: DEBUG nova.virt.hardware [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1777.212062] env[63379]: DEBUG 
nova.virt.hardware [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1777.212192] env[63379]: DEBUG nova.virt.hardware [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1777.212370] env[63379]: DEBUG nova.virt.hardware [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1777.213287] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee8db982-e0c1-4c45-9051-cbecd55c0212 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.222495] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb171f1f-4a31-4b61-afa1-05b3b32e03ab {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.500922] env[63379]: DEBUG oslo_vmware.api [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780013, 'name': ReconfigVM_Task, 'duration_secs': 0.649702} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1777.500922] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Reconfigured VM instance instance-00000059 to attach disk [datastore1] 0f248290-a14c-4c76-98b3-4efa5bda5f05/0f248290-a14c-4c76-98b3-4efa5bda5f05.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1777.501543] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f4287b40-e4de-4a09-b4f1-b321df871e87 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.513540] env[63379]: DEBUG oslo_vmware.api [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1777.513540] env[63379]: value = "task-1780015" [ 1777.513540] env[63379]: _type = "Task" [ 1777.513540] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1777.523316] env[63379]: DEBUG oslo_vmware.api [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780015, 'name': Rename_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.555881] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9772ea9e-f7c8-472e-b392-24b4ef41f2cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "f983d089-7cfc-46a5-8f8d-f49f67aef1da" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.027s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1777.619501] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1777.661393] env[63379]: DEBUG oslo_vmware.api [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Task: {'id': task-1780014, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.525368} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1777.661710] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 2254844f-b1f9-435e-ac8a-f114f05331e1/2254844f-b1f9-435e-ac8a-f114f05331e1.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1777.661936] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1777.662213] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-311a037f-b1d4-4c0d-a9af-b85091f8c5f3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.669104] env[63379]: DEBUG oslo_vmware.api [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Waiting for the task: (returnval){ [ 1777.669104] env[63379]: value = "task-1780016" [ 1777.669104] env[63379]: _type = "Task" [ 1777.669104] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1777.677551] env[63379]: DEBUG oslo_vmware.api [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Task: {'id': task-1780016, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.826393] env[63379]: DEBUG nova.network.neutron [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Successfully updated port: ec9d8d4a-76e2-4945-ac09-1e225b358218 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1778.024197] env[63379]: DEBUG oslo_vmware.api [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780015, 'name': Rename_Task, 'duration_secs': 0.148238} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1778.024522] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1778.024775] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2c27f751-61bf-471c-90d1-b58c658804f3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.033227] env[63379]: DEBUG oslo_vmware.api [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1778.033227] env[63379]: value = "task-1780017" [ 1778.033227] env[63379]: _type = "Task" [ 1778.033227] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1778.041408] env[63379]: DEBUG oslo_vmware.api [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780017, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.190157] env[63379]: DEBUG oslo_vmware.api [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Task: {'id': task-1780016, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063211} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1778.190589] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1778.191415] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69ef3d9a-a31f-408a-b688-55c07a6d7968 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.232371] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] 2254844f-b1f9-435e-ac8a-f114f05331e1/2254844f-b1f9-435e-ac8a-f114f05331e1.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1778.234244] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1855fb2b-19c2-48ed-9f40-aaec584679bb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.276123] env[63379]: DEBUG oslo_vmware.api [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Waiting for the task: (returnval){ [ 1778.276123] env[63379]: value = "task-1780018" [ 1778.276123] env[63379]: _type = "Task" [ 1778.276123] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1778.276123] env[63379]: DEBUG oslo_vmware.api [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Task: {'id': task-1780018, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.333001] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "refresh_cache-9040201c-e1de-47d9-b9c2-b30c14e32749" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1778.333001] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquired lock "refresh_cache-9040201c-e1de-47d9-b9c2-b30c14e32749" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1778.333001] env[63379]: DEBUG nova.network.neutron [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1778.398053] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b41dbb17-9d36-419b-b5f9-3c72f2a9323e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.407707] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-711524a3-87ff-436c-aced-ef01d9a298fa {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.442515] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59dea190-ad0c-47ca-8a95-ea81f333300c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.450595] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6c83178-ffa7-4d49-aaa8-73c4040534f5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.463893] env[63379]: DEBUG nova.compute.provider_tree [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1778.543477] env[63379]: DEBUG oslo_vmware.api [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780017, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.768770] env[63379]: DEBUG oslo_vmware.api [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Task: {'id': task-1780018, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.872827] env[63379]: DEBUG nova.network.neutron [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1778.967944] env[63379]: DEBUG nova.scheduler.client.report [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1779.037089] env[63379]: DEBUG nova.network.neutron [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Updating instance_info_cache with network_info: [{"id": "ec9d8d4a-76e2-4945-ac09-1e225b358218", "address": "fa:16:3e:0e:07:8c", "network": {"id": "867cf8d8-4bba-4306-ad6d-632c9dc6863d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-777715300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a3363a90de2d4d5988ddd03974c10d0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "459b8c74-0aa6-42b6-996a-42b1c5d7e5c6", "external-id": "nsx-vlan-transportzone-467", "segmentation_id": 467, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec9d8d4a-76", "ovs_interfaceid": "ec9d8d4a-76e2-4945-ac09-1e225b358218", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1779.049094] env[63379]: DEBUG oslo_vmware.api [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780017, 'name': PowerOnVM_Task, 'duration_secs': 0.763746} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1779.049707] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1779.049707] env[63379]: INFO nova.compute.manager [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Took 8.34 seconds to spawn the instance on the hypervisor. [ 1779.049826] env[63379]: DEBUG nova.compute.manager [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1779.050529] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79695a90-7a86-4f0c-a32f-13831cbeafbf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.129795] env[63379]: DEBUG nova.compute.manager [req-4dc20089-9c4a-455b-b35c-5a4c1b27d77e req-d1cd2db3-0e3e-41ef-b4c3-37f7e946fc61 service nova] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Received event network-vif-plugged-ec9d8d4a-76e2-4945-ac09-1e225b358218 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1779.130180] env[63379]: DEBUG oslo_concurrency.lockutils [req-4dc20089-9c4a-455b-b35c-5a4c1b27d77e req-d1cd2db3-0e3e-41ef-b4c3-37f7e946fc61 service nova] Acquiring lock "9040201c-e1de-47d9-b9c2-b30c14e32749-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1779.130487] env[63379]: DEBUG oslo_concurrency.lockutils [req-4dc20089-9c4a-455b-b35c-5a4c1b27d77e req-d1cd2db3-0e3e-41ef-b4c3-37f7e946fc61 service nova] Lock "9040201c-e1de-47d9-b9c2-b30c14e32749-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1779.130670] env[63379]: DEBUG oslo_concurrency.lockutils [req-4dc20089-9c4a-455b-b35c-5a4c1b27d77e req-d1cd2db3-0e3e-41ef-b4c3-37f7e946fc61 service nova] Lock "9040201c-e1de-47d9-b9c2-b30c14e32749-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1779.130848] env[63379]: DEBUG nova.compute.manager [req-4dc20089-9c4a-455b-b35c-5a4c1b27d77e req-d1cd2db3-0e3e-41ef-b4c3-37f7e946fc61 service nova] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] No waiting events found dispatching network-vif-plugged-ec9d8d4a-76e2-4945-ac09-1e225b358218 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1779.131031] env[63379]: WARNING nova.compute.manager [req-4dc20089-9c4a-455b-b35c-5a4c1b27d77e req-d1cd2db3-0e3e-41ef-b4c3-37f7e946fc61 service nova] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Received unexpected 
event network-vif-plugged-ec9d8d4a-76e2-4945-ac09-1e225b358218 for instance with vm_state building and task_state spawning. [ 1779.131199] env[63379]: DEBUG nova.compute.manager [req-4dc20089-9c4a-455b-b35c-5a4c1b27d77e req-d1cd2db3-0e3e-41ef-b4c3-37f7e946fc61 service nova] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Received event network-changed-ec9d8d4a-76e2-4945-ac09-1e225b358218 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1779.131361] env[63379]: DEBUG nova.compute.manager [req-4dc20089-9c4a-455b-b35c-5a4c1b27d77e req-d1cd2db3-0e3e-41ef-b4c3-37f7e946fc61 service nova] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Refreshing instance network info cache due to event network-changed-ec9d8d4a-76e2-4945-ac09-1e225b358218. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1779.131530] env[63379]: DEBUG oslo_concurrency.lockutils [req-4dc20089-9c4a-455b-b35c-5a4c1b27d77e req-d1cd2db3-0e3e-41ef-b4c3-37f7e946fc61 service nova] Acquiring lock "refresh_cache-9040201c-e1de-47d9-b9c2-b30c14e32749" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1779.270757] env[63379]: DEBUG oslo_vmware.api [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Task: {'id': task-1780018, 'name': ReconfigVM_Task, 'duration_secs': 0.513663} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1779.270860] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Reconfigured VM instance instance-0000005a to attach disk [datastore1] 2254844f-b1f9-435e-ac8a-f114f05331e1/2254844f-b1f9-435e-ac8a-f114f05331e1.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1779.271547] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-beea2325-3992-4525-8398-22887246439a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.278048] env[63379]: DEBUG oslo_vmware.api [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Waiting for the task: (returnval){ [ 1779.278048] env[63379]: value = "task-1780019" [ 1779.278048] env[63379]: _type = "Task" [ 1779.278048] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1779.287316] env[63379]: DEBUG oslo_vmware.api [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Task: {'id': task-1780019, 'name': Rename_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.472648] env[63379]: DEBUG oslo_concurrency.lockutils [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.451s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1779.473213] env[63379]: DEBUG nova.compute.manager [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1779.475733] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b65f79af-290c-48f0-aaa7-8467ee951d21 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 4.601s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1779.544783] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Releasing lock "refresh_cache-9040201c-e1de-47d9-b9c2-b30c14e32749" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1779.544783] env[63379]: DEBUG nova.compute.manager [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Instance network_info: |[{"id": "ec9d8d4a-76e2-4945-ac09-1e225b358218", "address": "fa:16:3e:0e:07:8c", "network": {"id": "867cf8d8-4bba-4306-ad6d-632c9dc6863d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-777715300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a3363a90de2d4d5988ddd03974c10d0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "459b8c74-0aa6-42b6-996a-42b1c5d7e5c6", "external-id": "nsx-vlan-transportzone-467", "segmentation_id": 467, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec9d8d4a-76", "ovs_interfaceid": "ec9d8d4a-76e2-4945-ac09-1e225b358218", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1779.545131] env[63379]: DEBUG oslo_concurrency.lockutils [req-4dc20089-9c4a-455b-b35c-5a4c1b27d77e req-d1cd2db3-0e3e-41ef-b4c3-37f7e946fc61 service nova] Acquired lock "refresh_cache-9040201c-e1de-47d9-b9c2-b30c14e32749" {{(pid=63379) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1779.545377] env[63379]: DEBUG nova.network.neutron [req-4dc20089-9c4a-455b-b35c-5a4c1b27d77e req-d1cd2db3-0e3e-41ef-b4c3-37f7e946fc61 service nova] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Refreshing network info cache for port ec9d8d4a-76e2-4945-ac09-1e225b358218 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1779.546696] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0e:07:8c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '459b8c74-0aa6-42b6-996a-42b1c5d7e5c6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ec9d8d4a-76e2-4945-ac09-1e225b358218', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1779.554927] env[63379]: DEBUG oslo.service.loopingcall [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1779.555415] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1779.555661] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ba003878-fcc9-4f94-8405-370a167adab9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.581725] env[63379]: INFO nova.compute.manager [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Took 16.38 seconds to build instance. [ 1779.585599] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1779.585599] env[63379]: value = "task-1780020" [ 1779.585599] env[63379]: _type = "Task" [ 1779.585599] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1779.594283] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780020, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.654033] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "8078bac6-146a-4e3a-a7a7-7093f617a330" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1779.654340] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "8078bac6-146a-4e3a-a7a7-7093f617a330" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1779.789893] env[63379]: DEBUG oslo_vmware.api [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Task: {'id': task-1780019, 'name': Rename_Task, 'duration_secs': 0.342357} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1779.789893] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1779.790051] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fb3c23d4-f5ba-4c53-af28-f980e438eab3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.796983] env[63379]: DEBUG oslo_vmware.api [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Waiting for the task: (returnval){ [ 1779.796983] env[63379]: value = "task-1780021" [ 1779.796983] env[63379]: _type = "Task" [ 1779.796983] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1779.804747] env[63379]: DEBUG oslo_vmware.api [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Task: {'id': task-1780021, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.929854] env[63379]: DEBUG oslo_concurrency.lockutils [None req-196f14c4-a3b2-4aec-b0b5-7c6e3722e85b tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "0f248290-a14c-4c76-98b3-4efa5bda5f05" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1779.964664] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1779.965165] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1779.965351] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Starting heal instance info cache {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9974}} [ 1779.965524] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Rebuilding the list of instances to heal {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9978}} [ 1779.978939] env[63379]: DEBUG nova.compute.utils [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1779.984032] env[63379]: DEBUG nova.compute.manager [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1779.984175] env[63379]: DEBUG nova.network.neutron [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1780.051018] env[63379]: DEBUG nova.policy [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c8248c5ed17044fdb5c07c0534d9354f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '734449c322394434a93a7b427d8ed7e8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1780.083978] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e8707d96-dff3-41eb-9be0-1ea8ae517b57 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "0f248290-a14c-4c76-98b3-4efa5bda5f05" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.897s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1780.084272] env[63379]: DEBUG oslo_concurrency.lockutils [None req-196f14c4-a3b2-4aec-b0b5-7c6e3722e85b tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "0f248290-a14c-4c76-98b3-4efa5bda5f05" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.155s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1780.084553] env[63379]: DEBUG nova.compute.manager [None req-196f14c4-a3b2-4aec-b0b5-7c6e3722e85b tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1780.085644] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28ae56a8-b1f3-4c61-ad41-ca874de790d2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.097095] env[63379]: DEBUG nova.compute.manager [None req-196f14c4-a3b2-4aec-b0b5-7c6e3722e85b tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63379) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1780.097736] env[63379]: DEBUG nova.objects.instance [None req-196f14c4-a3b2-4aec-b0b5-7c6e3722e85b tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lazy-loading 'flavor' on Instance uuid 0f248290-a14c-4c76-98b3-4efa5bda5f05 {{(pid=63379) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 1780.104090] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780020, 'name': CreateVM_Task, 'duration_secs': 0.476369} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1780.105081] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1780.105423] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1780.105603] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1780.105934] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1780.106235] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa74acea-269c-4396-ad6c-5277d698a259 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.116117] env[63379]: DEBUG oslo_vmware.api [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1780.116117] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]526fa273-5d63-3619-906c-1ef95e8b550c" [ 1780.116117] env[63379]: _type = "Task" [ 1780.116117] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1780.123385] env[63379]: DEBUG oslo_vmware.api [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]526fa273-5d63-3619-906c-1ef95e8b550c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.156912] env[63379]: DEBUG nova.compute.manager [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Starting instance... 
{{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1780.276636] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5026028-ff9b-4bcd-bdb6-c1ae0a7e8f8f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.288705] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d1adc9e-b201-4a7c-9625-6d893b0e1c87 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.324416] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d81212cc-0c57-4af6-8426-85f9977fe631 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.334920] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82dc0fd3-d2ef-497a-9f48-878072d04638 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.338633] env[63379]: DEBUG oslo_vmware.api [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Task: {'id': task-1780021, 'name': PowerOnVM_Task, 'duration_secs': 0.479171} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1780.338902] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1780.339143] env[63379]: INFO nova.compute.manager [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Took 7.28 seconds to spawn the instance on the hypervisor. 
[ 1780.339345] env[63379]: DEBUG nova.compute.manager [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1780.340383] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec35423a-98d9-4f64-b0e2-6101f65a6eda {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.350745] env[63379]: DEBUG nova.compute.provider_tree [None req-b65f79af-290c-48f0-aaa7-8467ee951d21 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1780.433879] env[63379]: DEBUG nova.network.neutron [req-4dc20089-9c4a-455b-b35c-5a4c1b27d77e req-d1cd2db3-0e3e-41ef-b4c3-37f7e946fc61 service nova] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Updated VIF entry in instance network info cache for port ec9d8d4a-76e2-4945-ac09-1e225b358218. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1780.434344] env[63379]: DEBUG nova.network.neutron [req-4dc20089-9c4a-455b-b35c-5a4c1b27d77e req-d1cd2db3-0e3e-41ef-b4c3-37f7e946fc61 service nova] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Updating instance_info_cache with network_info: [{"id": "ec9d8d4a-76e2-4945-ac09-1e225b358218", "address": "fa:16:3e:0e:07:8c", "network": {"id": "867cf8d8-4bba-4306-ad6d-632c9dc6863d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-777715300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a3363a90de2d4d5988ddd03974c10d0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "459b8c74-0aa6-42b6-996a-42b1c5d7e5c6", "external-id": "nsx-vlan-transportzone-467", "segmentation_id": 467, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec9d8d4a-76", "ovs_interfaceid": "ec9d8d4a-76e2-4945-ac09-1e225b358218", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1780.471208] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Skipping network cache update for instance because it is Building. {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9987}} [ 1780.471467] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Skipping network cache update for instance because it is Building. 
{{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9987}} [ 1780.471606] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Skipping network cache update for instance because it is Building. {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9987}} [ 1780.484697] env[63379]: DEBUG nova.compute.manager [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1780.525640] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "refresh_cache-90f0c97d-695b-4975-8ab9-4e77a9175da1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1780.525795] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquired lock "refresh_cache-90f0c97d-695b-4975-8ab9-4e77a9175da1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1780.525944] env[63379]: DEBUG nova.network.neutron [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Forcefully refreshing network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1780.526113] env[63379]: DEBUG nova.objects.instance [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lazy-loading 'info_cache' on Instance uuid 90f0c97d-695b-4975-8ab9-4e77a9175da1 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1780.608740] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-196f14c4-a3b2-4aec-b0b5-7c6e3722e85b tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1780.609054] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-186f988c-fb8d-43ca-8c6c-e15ec8597de1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.615942] env[63379]: DEBUG oslo_vmware.api [None req-196f14c4-a3b2-4aec-b0b5-7c6e3722e85b tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1780.615942] env[63379]: value = "task-1780022" [ 1780.615942] env[63379]: _type = "Task" [ 1780.615942] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1780.627515] env[63379]: DEBUG oslo_vmware.api [None req-196f14c4-a3b2-4aec-b0b5-7c6e3722e85b tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780022, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.632038] env[63379]: DEBUG oslo_vmware.api [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]526fa273-5d63-3619-906c-1ef95e8b550c, 'name': SearchDatastore_Task, 'duration_secs': 0.010466} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1780.632038] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1780.632038] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1780.632316] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1780.632362] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1780.632522] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1780.632791] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fdecc4ad-53a8-4a78-bd85-390a0afd9ba4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.641463] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1780.641637] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1780.642376] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4045776-2a6d-4117-a032-33758a1c6780 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.647660] env[63379]: DEBUG oslo_vmware.api [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1780.647660] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52cf255f-8363-fed2-cbbe-5453a5577029" [ 1780.647660] env[63379]: _type = "Task" [ 1780.647660] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1780.657157] env[63379]: DEBUG oslo_vmware.api [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52cf255f-8363-fed2-cbbe-5453a5577029, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.675875] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1780.828832] env[63379]: DEBUG nova.network.neutron [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Successfully created port: 844ccd54-2ca6-4d56-a26f-eea2683c760c {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1780.854591] env[63379]: DEBUG nova.scheduler.client.report [None req-b65f79af-290c-48f0-aaa7-8467ee951d21 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1780.874022] env[63379]: INFO nova.compute.manager [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Took 16.56 seconds to build instance. 
[ 1780.938046] env[63379]: DEBUG oslo_concurrency.lockutils [req-4dc20089-9c4a-455b-b35c-5a4c1b27d77e req-d1cd2db3-0e3e-41ef-b4c3-37f7e946fc61 service nova] Releasing lock "refresh_cache-9040201c-e1de-47d9-b9c2-b30c14e32749" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1781.130814] env[63379]: DEBUG oslo_vmware.api [None req-196f14c4-a3b2-4aec-b0b5-7c6e3722e85b tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780022, 'name': PowerOffVM_Task, 'duration_secs': 0.405348} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1781.131104] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-196f14c4-a3b2-4aec-b0b5-7c6e3722e85b tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1781.131289] env[63379]: DEBUG nova.compute.manager [None req-196f14c4-a3b2-4aec-b0b5-7c6e3722e85b tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1781.132067] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acefb6f7-e661-4a43-8f37-a7458a1e65c1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.159491] env[63379]: DEBUG oslo_vmware.api [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52cf255f-8363-fed2-cbbe-5453a5577029, 'name': SearchDatastore_Task, 'duration_secs': 0.009146} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1781.160223] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bae9b066-429b-4644-93d6-9a54a725c9f4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.165783] env[63379]: DEBUG oslo_vmware.api [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1781.165783] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5215afe8-e2fc-a200-1f27-ad80e1b753dc" [ 1781.165783] env[63379]: _type = "Task" [ 1781.165783] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1781.173949] env[63379]: DEBUG oslo_vmware.api [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5215afe8-e2fc-a200-1f27-ad80e1b753dc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1781.371535] env[63379]: DEBUG oslo_concurrency.lockutils [None req-975a3207-a985-430e-a9bb-a0d48415b1eb tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Lock "2254844f-b1f9-435e-ac8a-f114f05331e1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.074s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1781.493571] env[63379]: DEBUG nova.compute.manager [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1781.522385] env[63379]: DEBUG nova.virt.hardware [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1781.522652] env[63379]: DEBUG nova.virt.hardware [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1781.522812] env[63379]: DEBUG nova.virt.hardware [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1781.523008] env[63379]: DEBUG nova.virt.hardware [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1781.523164] env[63379]: DEBUG nova.virt.hardware [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1781.523313] env[63379]: DEBUG nova.virt.hardware [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Chose sockets=0, 
cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1781.523527] env[63379]: DEBUG nova.virt.hardware [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1781.523686] env[63379]: DEBUG nova.virt.hardware [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1781.523856] env[63379]: DEBUG nova.virt.hardware [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1781.524115] env[63379]: DEBUG nova.virt.hardware [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1781.524333] env[63379]: DEBUG nova.virt.hardware [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1781.525233] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0753fd3-da46-435f-b576-962ecbc135b5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.533405] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb01d644-e9f7-474f-892b-55c93fe12d19 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.646569] env[63379]: DEBUG oslo_concurrency.lockutils [None req-196f14c4-a3b2-4aec-b0b5-7c6e3722e85b tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "0f248290-a14c-4c76-98b3-4efa5bda5f05" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.562s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1781.677217] env[63379]: DEBUG oslo_vmware.api [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5215afe8-e2fc-a200-1f27-ad80e1b753dc, 'name': SearchDatastore_Task, 'duration_secs': 0.009461} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1781.677509] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1781.677768] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 9040201c-e1de-47d9-b9c2-b30c14e32749/9040201c-e1de-47d9-b9c2-b30c14e32749.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1781.678046] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-39e08d6f-2d17-4b83-8700-bd9811d03e88 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.684935] env[63379]: DEBUG oslo_vmware.api [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1781.684935] env[63379]: value = "task-1780023" [ 1781.684935] env[63379]: _type = "Task" [ 1781.684935] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1781.692980] env[63379]: DEBUG oslo_vmware.api [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780023, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1781.696466] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8a904183-11cd-4401-a36b-3d6def78d9a0 tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Acquiring lock "2254844f-b1f9-435e-ac8a-f114f05331e1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1781.696697] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8a904183-11cd-4401-a36b-3d6def78d9a0 tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Lock "2254844f-b1f9-435e-ac8a-f114f05331e1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1781.696937] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8a904183-11cd-4401-a36b-3d6def78d9a0 tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Acquiring lock "2254844f-b1f9-435e-ac8a-f114f05331e1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1781.697151] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8a904183-11cd-4401-a36b-3d6def78d9a0 tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Lock "2254844f-b1f9-435e-ac8a-f114f05331e1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1781.697325] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8a904183-11cd-4401-a36b-3d6def78d9a0 tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Lock "2254844f-b1f9-435e-ac8a-f114f05331e1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1781.699418] env[63379]: INFO nova.compute.manager [None req-8a904183-11cd-4401-a36b-3d6def78d9a0 tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Terminating instance [ 1781.702778] env[63379]: DEBUG nova.compute.manager [None req-8a904183-11cd-4401-a36b-3d6def78d9a0 tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1781.702983] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8a904183-11cd-4401-a36b-3d6def78d9a0 tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1781.707030] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ea9290e-58ee-4754-9ca5-9afd2b4c23b6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.712584] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a904183-11cd-4401-a36b-3d6def78d9a0 tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1781.712830] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-821cf3e6-e34d-4073-a03b-7b655bd6a3ce {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.724362] env[63379]: DEBUG oslo_vmware.api [None req-8a904183-11cd-4401-a36b-3d6def78d9a0 tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Waiting for the task: (returnval){ [ 1781.724362] env[63379]: value = "task-1780024" [ 1781.724362] env[63379]: _type = "Task" [ 1781.724362] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1781.733431] env[63379]: DEBUG oslo_vmware.api [None req-8a904183-11cd-4401-a36b-3d6def78d9a0 tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Task: {'id': task-1780024, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1781.869784] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b65f79af-290c-48f0-aaa7-8467ee951d21 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.394s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1781.874169] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.255s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1781.874576] env[63379]: DEBUG nova.objects.instance [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Lazy-loading 'resources' on Instance uuid b9bc2562-9475-400e-9cf9-646b8f4c8cf2 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1782.197800] env[63379]: DEBUG oslo_vmware.api [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780023, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1782.237300] env[63379]: DEBUG oslo_vmware.api [None req-8a904183-11cd-4401-a36b-3d6def78d9a0 tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Task: {'id': task-1780024, 'name': PowerOffVM_Task, 'duration_secs': 0.190037} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1782.237699] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a904183-11cd-4401-a36b-3d6def78d9a0 tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1782.237988] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8a904183-11cd-4401-a36b-3d6def78d9a0 tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1782.238428] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5576dabb-3786-425b-b9ea-833f0270fdca {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.342980] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8a904183-11cd-4401-a36b-3d6def78d9a0 tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1782.343263] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8a904183-11cd-4401-a36b-3d6def78d9a0 tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1782.343600] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a904183-11cd-4401-a36b-3d6def78d9a0 tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Deleting the datastore file [datastore1] 2254844f-b1f9-435e-ac8a-f114f05331e1 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1782.343975] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-58dd34a8-c717-47e5-bd38-df0ee38fea73 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.351744] env[63379]: DEBUG oslo_vmware.api [None req-8a904183-11cd-4401-a36b-3d6def78d9a0 tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Waiting for the task: (returnval){ [ 1782.351744] env[63379]: value = "task-1780026" [ 1782.351744] env[63379]: _type = "Task" [ 1782.351744] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1782.360495] env[63379]: DEBUG oslo_vmware.api [None req-8a904183-11cd-4401-a36b-3d6def78d9a0 tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Task: {'id': task-1780026, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1782.428293] env[63379]: DEBUG nova.network.neutron [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Updating instance_info_cache with network_info: [{"id": "ef820562-0de4-462d-a51d-13e4a4929719", "address": "fa:16:3e:eb:5b:7f", "network": {"id": "c67e6fb1-ba3e-4494-b459-ecd555f3bf64", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1864563188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.212", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c01c5c8c3734c4ea066324e542e7374", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6934071-bf85-4591-9c7d-55c7ea131262", "external-id": "nsx-vlan-transportzone-452", "segmentation_id": 452, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef820562-0d", "ovs_interfaceid": "ef820562-0de4-462d-a51d-13e4a4929719", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1782.456992] env[63379]: DEBUG nova.compute.manager [req-d5970b74-7c47-458f-916e-5b0d1031cefd req-36e0d4fc-c3bb-4910-9084-0e9fc51d7f5a service nova] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Received event network-vif-plugged-844ccd54-2ca6-4d56-a26f-eea2683c760c {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1782.457375] env[63379]: DEBUG oslo_concurrency.lockutils [req-d5970b74-7c47-458f-916e-5b0d1031cefd req-36e0d4fc-c3bb-4910-9084-0e9fc51d7f5a service nova] Acquiring lock "cb62192b-63db-40d0-97bb-1df171ade64b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1782.457691] env[63379]: DEBUG oslo_concurrency.lockutils [req-d5970b74-7c47-458f-916e-5b0d1031cefd req-36e0d4fc-c3bb-4910-9084-0e9fc51d7f5a service nova] Lock "cb62192b-63db-40d0-97bb-1df171ade64b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1782.457962] env[63379]: DEBUG oslo_concurrency.lockutils [req-d5970b74-7c47-458f-916e-5b0d1031cefd req-36e0d4fc-c3bb-4910-9084-0e9fc51d7f5a service nova] Lock "cb62192b-63db-40d0-97bb-1df171ade64b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1782.459759] env[63379]: DEBUG nova.compute.manager [req-d5970b74-7c47-458f-916e-5b0d1031cefd req-36e0d4fc-c3bb-4910-9084-0e9fc51d7f5a service nova] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] No waiting events found dispatching network-vif-plugged-844ccd54-2ca6-4d56-a26f-eea2683c760c {{(pid=63379) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1782.460056] env[63379]: WARNING nova.compute.manager [req-d5970b74-7c47-458f-916e-5b0d1031cefd req-36e0d4fc-c3bb-4910-9084-0e9fc51d7f5a service nova] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Received unexpected event network-vif-plugged-844ccd54-2ca6-4d56-a26f-eea2683c760c for instance with vm_state building and task_state spawning. [ 1782.462620] env[63379]: INFO nova.scheduler.client.report [None req-b65f79af-290c-48f0-aaa7-8467ee951d21 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Deleted allocation for migration 26f6c1b9-5396-465b-824d-55a899bd9ddd [ 1782.496404] env[63379]: DEBUG oslo_concurrency.lockutils [None req-52687a0a-c68c-4546-9dc4-ed6c4a4ea79d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "0f248290-a14c-4c76-98b3-4efa5bda5f05" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1782.496404] env[63379]: DEBUG oslo_concurrency.lockutils [None req-52687a0a-c68c-4546-9dc4-ed6c4a4ea79d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "0f248290-a14c-4c76-98b3-4efa5bda5f05" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1782.496404] env[63379]: DEBUG oslo_concurrency.lockutils [None req-52687a0a-c68c-4546-9dc4-ed6c4a4ea79d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "0f248290-a14c-4c76-98b3-4efa5bda5f05-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1782.496404] env[63379]: DEBUG oslo_concurrency.lockutils [None req-52687a0a-c68c-4546-9dc4-ed6c4a4ea79d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "0f248290-a14c-4c76-98b3-4efa5bda5f05-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1782.496404] env[63379]: DEBUG oslo_concurrency.lockutils [None req-52687a0a-c68c-4546-9dc4-ed6c4a4ea79d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "0f248290-a14c-4c76-98b3-4efa5bda5f05-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1782.497887] env[63379]: INFO nova.compute.manager [None req-52687a0a-c68c-4546-9dc4-ed6c4a4ea79d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Terminating instance [ 1782.501032] env[63379]: DEBUG nova.compute.manager [None req-52687a0a-c68c-4546-9dc4-ed6c4a4ea79d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Start destroying the instance on the 
hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1782.501032] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-52687a0a-c68c-4546-9dc4-ed6c4a4ea79d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1782.501990] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b86bb1b0-a30e-4ebc-abb8-5f0b5ae38d51 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.520043] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-52687a0a-c68c-4546-9dc4-ed6c4a4ea79d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1782.520323] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1eb378d9-c253-4c58-ac9b-ede262626106 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.601576] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-52687a0a-c68c-4546-9dc4-ed6c4a4ea79d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1782.601887] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-52687a0a-c68c-4546-9dc4-ed6c4a4ea79d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1782.602050] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-52687a0a-c68c-4546-9dc4-ed6c4a4ea79d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Deleting the datastore file [datastore1] 0f248290-a14c-4c76-98b3-4efa5bda5f05 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1782.602315] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-71922f7d-bc65-4fd9-9c97-d537299a35e0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.608507] env[63379]: DEBUG oslo_vmware.api [None req-52687a0a-c68c-4546-9dc4-ed6c4a4ea79d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1782.608507] env[63379]: value = "task-1780028" [ 1782.608507] env[63379]: _type = "Task" [ 1782.608507] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1782.618629] env[63379]: DEBUG oslo_vmware.api [None req-52687a0a-c68c-4546-9dc4-ed6c4a4ea79d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780028, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1782.618830] env[63379]: DEBUG nova.network.neutron [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Successfully updated port: 844ccd54-2ca6-4d56-a26f-eea2683c760c {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1782.682246] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-252b82d3-8e92-4385-b190-c269881978d5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.692467] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aba080f6-b5fa-4b16-a3cb-43f19c5ab054 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.698906] env[63379]: DEBUG oslo_vmware.api [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780023, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.640609} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1782.699788] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 9040201c-e1de-47d9-b9c2-b30c14e32749/9040201c-e1de-47d9-b9c2-b30c14e32749.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1782.699788] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1782.699961] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a8679dcc-c04f-402e-be36-1df9338a9c01 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.732043] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64e9ef1d-c96e-42bd-94c3-978de343c2d7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.734935] env[63379]: DEBUG oslo_vmware.api [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1782.734935] env[63379]: value = "task-1780029" [ 1782.734935] env[63379]: _type = "Task" [ 1782.734935] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1782.742081] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2b8a4ab-4328-4fe5-9cc0-b732c5951e1e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.749103] env[63379]: DEBUG oslo_vmware.api [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780029, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1782.758914] env[63379]: DEBUG nova.compute.provider_tree [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1782.861288] env[63379]: DEBUG oslo_vmware.api [None req-8a904183-11cd-4401-a36b-3d6def78d9a0 tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Task: {'id': task-1780026, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.15414} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1782.861510] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a904183-11cd-4401-a36b-3d6def78d9a0 tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1782.861705] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8a904183-11cd-4401-a36b-3d6def78d9a0 tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1782.861922] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8a904183-11cd-4401-a36b-3d6def78d9a0 tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1782.862073] env[63379]: INFO nova.compute.manager [None req-8a904183-11cd-4401-a36b-3d6def78d9a0 tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Took 1.16 seconds to destroy the instance on the hypervisor. 
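The records above repeatedly show the vSphere task lifecycle: an operation such as CopyVirtualDisk_Task, PowerOffVM_Task, or DeleteDatastoreFile_Task is invoked, the caller logs "Waiting for the task ... to complete", progress is polled (0%, 51%, ...), and completion is finally reported together with a 'duration_secs' value. The following is a minimal, illustrative Python sketch of that generic poll-until-done pattern only; it is not the actual oslo.vmware wait_for_task implementation, and fetch_task_info, TaskTimeout, and the dictionary shape they exchange are assumptions introduced purely for the example.

# Illustrative sketch of the "wait for task" polling pattern seen in the log.
# Names here (fetch_task_info, TaskTimeout) are hypothetical, not oslo.vmware API.
import time


class TaskTimeout(Exception):
    """Raised when a task does not finish within the allowed time."""


def wait_for_task_sketch(fetch_task_info, task_id, interval=0.5, timeout=300):
    """Poll a long-running task until it succeeds, fails, or times out.

    fetch_task_info(task_id) is assumed to return a dict such as
    {'state': 'running', 'progress': 51},
    {'state': 'success', 'duration_secs': 0.64}, or
    {'state': 'error', 'message': '...'}.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_task_info(task_id)
        state = info.get('state')
        if state == 'success':
            # Mirrors: "Task: {'id': ..., 'duration_secs': ...} completed successfully."
            return info
        if state == 'error':
            raise RuntimeError(f"Task {task_id} failed: {info.get('message')}")
        # Mirrors: "Task: {'id': ..., 'name': ...} progress is N%."
        print(f"Task {task_id} progress is {info.get('progress', 0)}%.")
        time.sleep(interval)
    raise TaskTimeout(f"Task {task_id} did not complete within {timeout}s")

Using a monotonic-clock deadline rather than counting poll iterations keeps the timeout meaningful even when individual polls (like the RetrievePropertiesEx round trips interleaved in the log) take variable time.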
[ 1782.862329] env[63379]: DEBUG oslo.service.loopingcall [None req-8a904183-11cd-4401-a36b-3d6def78d9a0 tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1782.862525] env[63379]: DEBUG nova.compute.manager [-] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1782.862622] env[63379]: DEBUG nova.network.neutron [-] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1782.931067] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Releasing lock "refresh_cache-90f0c97d-695b-4975-8ab9-4e77a9175da1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1782.931313] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Updated the network info_cache for instance {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10045}} [ 1782.931529] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1782.931732] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1782.931826] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1782.931971] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1782.932138] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1782.932268] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63379) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10593}} [ 1782.932413] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager.update_available_resource {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1782.976115] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b65f79af-290c-48f0-aaa7-8467ee951d21 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "eda684fa-1595-4985-beb7-c298049411bf" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 11.094s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1783.118864] env[63379]: DEBUG oslo_vmware.api [None req-52687a0a-c68c-4546-9dc4-ed6c4a4ea79d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780028, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.198049} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1783.119154] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-52687a0a-c68c-4546-9dc4-ed6c4a4ea79d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1783.119341] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-52687a0a-c68c-4546-9dc4-ed6c4a4ea79d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1783.119521] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-52687a0a-c68c-4546-9dc4-ed6c4a4ea79d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1783.119701] env[63379]: INFO nova.compute.manager [None req-52687a0a-c68c-4546-9dc4-ed6c4a4ea79d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Took 0.62 seconds to destroy the instance on the hypervisor. [ 1783.119956] env[63379]: DEBUG oslo.service.loopingcall [None req-52687a0a-c68c-4546-9dc4-ed6c4a4ea79d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1783.120193] env[63379]: DEBUG nova.compute.manager [-] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1783.120309] env[63379]: DEBUG nova.network.neutron [-] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1783.122106] env[63379]: DEBUG oslo_concurrency.lockutils [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Acquiring lock "refresh_cache-cb62192b-63db-40d0-97bb-1df171ade64b" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1783.122242] env[63379]: DEBUG oslo_concurrency.lockutils [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Acquired lock "refresh_cache-cb62192b-63db-40d0-97bb-1df171ade64b" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1783.122380] env[63379]: DEBUG nova.network.neutron [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1783.244511] env[63379]: DEBUG oslo_vmware.api [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780029, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090599} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1783.244863] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1783.245704] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5d42725-3139-4de7-8997-d2c18f551745 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.268906] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] 9040201c-e1de-47d9-b9c2-b30c14e32749/9040201c-e1de-47d9-b9c2-b30c14e32749.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1783.272121] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-df0329b0-1838-4e6d-8427-31c94b8bf2c2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.292998] env[63379]: DEBUG oslo_vmware.api [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1783.292998] env[63379]: value = "task-1780030" [ 1783.292998] env[63379]: _type = "Task" [ 1783.292998] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1783.306775] env[63379]: DEBUG oslo_vmware.api [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780030, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.306775] env[63379]: ERROR nova.scheduler.client.report [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [req-d69702ed-02c1-4b84-b365-91c1c7000cea] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID cf478c89-515f-4372-b90f-4868ab56e978. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-d69702ed-02c1-4b84-b365-91c1c7000cea"}]} [ 1783.326583] env[63379]: DEBUG nova.scheduler.client.report [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Refreshing inventories for resource provider cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1783.352262] env[63379]: DEBUG nova.scheduler.client.report [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Updating ProviderTree inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1783.352467] env[63379]: DEBUG nova.compute.provider_tree [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1783.365355] env[63379]: DEBUG nova.scheduler.client.report [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Refreshing aggregate associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, aggregates: None {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1783.386574] env[63379]: DEBUG nova.scheduler.client.report [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Refreshing trait associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1783.436198] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1783.581549] env[63379]: DEBUG nova.network.neutron [-] [instance: 
2254844f-b1f9-435e-ac8a-f114f05331e1] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1783.603087] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a89c74e-f428-4e02-907e-50ec74b069af {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.611297] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6b2aa38-bdf4-4c07-bb4a-c99e9b7abdb3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.643167] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-465870c1-6dc5-4f91-bcca-730593684539 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.650900] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86babaed-d28d-4160-8685-fe4945d14966 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.664687] env[63379]: DEBUG nova.compute.provider_tree [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1783.671697] env[63379]: DEBUG nova.network.neutron [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1783.803617] env[63379]: DEBUG oslo_vmware.api [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780030, 'name': ReconfigVM_Task, 'duration_secs': 0.27002} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1783.803900] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Reconfigured VM instance instance-0000005b to attach disk [datastore1] 9040201c-e1de-47d9-b9c2-b30c14e32749/9040201c-e1de-47d9-b9c2-b30c14e32749.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1783.804563] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9956811a-f914-4d37-ba51-890f369e6274 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.812732] env[63379]: DEBUG oslo_vmware.api [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1783.812732] env[63379]: value = "task-1780031" [ 1783.812732] env[63379]: _type = "Task" [ 1783.812732] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1783.821422] env[63379]: DEBUG oslo_vmware.api [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780031, 'name': Rename_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.825574] env[63379]: DEBUG nova.network.neutron [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Updating instance_info_cache with network_info: [{"id": "844ccd54-2ca6-4d56-a26f-eea2683c760c", "address": "fa:16:3e:a1:30:f4", "network": {"id": "55f3848c-4d4f-4c83-a3e6-bc7a6f7af3ce", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.254", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eb95d75934bc4912a35f709406a98a65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap844ccd54-2c", "ovs_interfaceid": "844ccd54-2ca6-4d56-a26f-eea2683c760c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1783.833403] env[63379]: DEBUG oslo_concurrency.lockutils [None req-21a8507d-7301-4724-be89-15e19551cf39 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "eda684fa-1595-4985-beb7-c298049411bf" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1783.833651] env[63379]: DEBUG oslo_concurrency.lockutils [None req-21a8507d-7301-4724-be89-15e19551cf39 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "eda684fa-1595-4985-beb7-c298049411bf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1783.833908] env[63379]: DEBUG oslo_concurrency.lockutils [None req-21a8507d-7301-4724-be89-15e19551cf39 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "eda684fa-1595-4985-beb7-c298049411bf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1783.834066] env[63379]: DEBUG oslo_concurrency.lockutils [None req-21a8507d-7301-4724-be89-15e19551cf39 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "eda684fa-1595-4985-beb7-c298049411bf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1783.834241] env[63379]: DEBUG oslo_concurrency.lockutils [None req-21a8507d-7301-4724-be89-15e19551cf39 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "eda684fa-1595-4985-beb7-c298049411bf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1783.836251] env[63379]: INFO nova.compute.manager [None req-21a8507d-7301-4724-be89-15e19551cf39 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Terminating instance [ 1783.838630] env[63379]: DEBUG nova.compute.manager [None req-21a8507d-7301-4724-be89-15e19551cf39 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1783.838834] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-21a8507d-7301-4724-be89-15e19551cf39 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1783.839683] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0be3bbf2-0466-4b1e-a9d4-c09bbed5583e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.847837] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-21a8507d-7301-4724-be89-15e19551cf39 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1783.848100] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8be1146e-66e9-4f54-b049-690c51b51751 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.854103] env[63379]: DEBUG oslo_vmware.api [None req-21a8507d-7301-4724-be89-15e19551cf39 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1783.854103] env[63379]: value = "task-1780032" [ 1783.854103] env[63379]: _type = "Task" [ 1783.854103] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1783.862017] env[63379]: DEBUG oslo_vmware.api [None req-21a8507d-7301-4724-be89-15e19551cf39 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780032, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.898061] env[63379]: DEBUG nova.network.neutron [-] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1784.084529] env[63379]: INFO nova.compute.manager [-] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Took 1.22 seconds to deallocate network for instance. [ 1784.184162] env[63379]: ERROR nova.scheduler.client.report [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [req-71b6d9d2-073d-40f9-b14f-e3d2c0e0db53] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID cf478c89-515f-4372-b90f-4868ab56e978. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-71b6d9d2-073d-40f9-b14f-e3d2c0e0db53"}]} [ 1784.199419] env[63379]: DEBUG nova.scheduler.client.report [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Refreshing inventories for resource provider cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1784.211797] env[63379]: DEBUG nova.scheduler.client.report [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Updating ProviderTree inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1784.212011] env[63379]: DEBUG nova.compute.provider_tree [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1784.221458] env[63379]: DEBUG nova.scheduler.client.report [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Refreshing aggregate associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, aggregates: None {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1784.237971] env[63379]: DEBUG nova.scheduler.client.report [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Refreshing trait associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1784.305217] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "9faef8ba-2263-4af8-ba5b-13a17b4275b6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1784.305492] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "9faef8ba-2263-4af8-ba5b-13a17b4275b6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1784.322792] env[63379]: DEBUG oslo_vmware.api [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780031, 'name': Rename_Task, 'duration_secs': 0.138006} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1784.325245] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1784.325649] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3f229741-2b46-4f5b-9eb1-d597be80c9f7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.327751] env[63379]: DEBUG oslo_concurrency.lockutils [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Releasing lock "refresh_cache-cb62192b-63db-40d0-97bb-1df171ade64b" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1784.328056] env[63379]: DEBUG nova.compute.manager [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Instance network_info: |[{"id": "844ccd54-2ca6-4d56-a26f-eea2683c760c", "address": "fa:16:3e:a1:30:f4", "network": {"id": "55f3848c-4d4f-4c83-a3e6-bc7a6f7af3ce", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.254", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eb95d75934bc4912a35f709406a98a65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap844ccd54-2c", "ovs_interfaceid": "844ccd54-2ca6-4d56-a26f-eea2683c760c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1784.328438] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None 
req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a1:30:f4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea00b53a-9c9b-4592-ab95-7e10473f338d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '844ccd54-2ca6-4d56-a26f-eea2683c760c', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1784.335821] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Creating folder: Project (734449c322394434a93a7b427d8ed7e8). Parent ref: group-v369214. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1784.338679] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-00564c80-d489-4c63-b902-58dc0ad2bf79 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.341823] env[63379]: DEBUG oslo_vmware.api [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1784.341823] env[63379]: value = "task-1780033" [ 1784.341823] env[63379]: _type = "Task" [ 1784.341823] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1784.347927] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Created folder: Project (734449c322394434a93a7b427d8ed7e8) in parent group-v369214. [ 1784.348186] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Creating folder: Instances. Parent ref: group-v369467. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1784.354827] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aeb49437-d46c-4931-b3f7-122a2f26354e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.356650] env[63379]: DEBUG oslo_vmware.api [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780033, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1784.366465] env[63379]: DEBUG oslo_vmware.api [None req-21a8507d-7301-4724-be89-15e19551cf39 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780032, 'name': PowerOffVM_Task, 'duration_secs': 0.185338} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1784.367101] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-21a8507d-7301-4724-be89-15e19551cf39 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1784.367101] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-21a8507d-7301-4724-be89-15e19551cf39 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1784.367382] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2a72397a-8949-4242-bd97-6fb622437439 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.372245] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Created folder: Instances in parent group-v369467. [ 1784.372537] env[63379]: DEBUG oslo.service.loopingcall [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1784.373261] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1784.373849] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0a274dbc-4df5-4ba2-8fd8-db03f4d3adb4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.396108] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1784.396108] env[63379]: value = "task-1780037" [ 1784.396108] env[63379]: _type = "Task" [ 1784.396108] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1784.401955] env[63379]: INFO nova.compute.manager [-] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Took 1.28 seconds to deallocate network for instance. [ 1784.406613] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780037, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1784.486474] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12dcbb3a-5fa9-42a2-9fbb-8d59cac95ca8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.492279] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-21a8507d-7301-4724-be89-15e19551cf39 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1784.492538] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-21a8507d-7301-4724-be89-15e19551cf39 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1784.492771] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-21a8507d-7301-4724-be89-15e19551cf39 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Deleting the datastore file [datastore1] eda684fa-1595-4985-beb7-c298049411bf {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1784.494516] env[63379]: DEBUG nova.compute.manager [req-e4b451c3-5794-4539-ba91-d86fa00bc026 req-8da22043-ed5c-4352-9ee7-a4f13dfe46f7 service nova] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Received event network-changed-844ccd54-2ca6-4d56-a26f-eea2683c760c {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1784.494758] env[63379]: DEBUG nova.compute.manager [req-e4b451c3-5794-4539-ba91-d86fa00bc026 req-8da22043-ed5c-4352-9ee7-a4f13dfe46f7 service nova] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Refreshing instance network info cache due to event network-changed-844ccd54-2ca6-4d56-a26f-eea2683c760c. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1784.495057] env[63379]: DEBUG oslo_concurrency.lockutils [req-e4b451c3-5794-4539-ba91-d86fa00bc026 req-8da22043-ed5c-4352-9ee7-a4f13dfe46f7 service nova] Acquiring lock "refresh_cache-cb62192b-63db-40d0-97bb-1df171ade64b" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1784.495291] env[63379]: DEBUG oslo_concurrency.lockutils [req-e4b451c3-5794-4539-ba91-d86fa00bc026 req-8da22043-ed5c-4352-9ee7-a4f13dfe46f7 service nova] Acquired lock "refresh_cache-cb62192b-63db-40d0-97bb-1df171ade64b" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1784.495532] env[63379]: DEBUG nova.network.neutron [req-e4b451c3-5794-4539-ba91-d86fa00bc026 req-8da22043-ed5c-4352-9ee7-a4f13dfe46f7 service nova] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Refreshing network info cache for port 844ccd54-2ca6-4d56-a26f-eea2683c760c {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1784.497836] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c4c1427a-29a8-4f19-9a99-8c0bb9e8f11b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.503838] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-783e25cc-a4a5-4e63-a565-88cf22a9098e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.508970] env[63379]: DEBUG oslo_vmware.api [None req-21a8507d-7301-4724-be89-15e19551cf39 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1784.508970] env[63379]: value = "task-1780038" [ 1784.508970] env[63379]: _type = "Task" [ 1784.508970] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1784.544383] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44f9c963-64af-4d26-8ce4-1edac56633d5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.550764] env[63379]: DEBUG oslo_vmware.api [None req-21a8507d-7301-4724-be89-15e19551cf39 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780038, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1784.556380] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0069d68-e5d1-4ee7-8025-44256a410038 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.571041] env[63379]: DEBUG nova.compute.provider_tree [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1784.590681] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8a904183-11cd-4401-a36b-3d6def78d9a0 tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1784.807920] env[63379]: DEBUG nova.compute.manager [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1784.852499] env[63379]: DEBUG oslo_vmware.api [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780033, 'name': PowerOnVM_Task} progress is 94%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1784.909042] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780037, 'name': CreateVM_Task, 'duration_secs': 0.442051} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1784.909229] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1784.910492] env[63379]: DEBUG oslo_concurrency.lockutils [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1784.910492] env[63379]: DEBUG oslo_concurrency.lockutils [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1784.910637] env[63379]: DEBUG oslo_concurrency.lockutils [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1784.912051] env[63379]: DEBUG oslo_concurrency.lockutils [None req-52687a0a-c68c-4546-9dc4-ed6c4a4ea79d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1784.912051] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3a7340b-5471-4d44-a762-e66854e66834 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.917073] env[63379]: DEBUG oslo_vmware.api [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Waiting for the task: (returnval){ [ 1784.917073] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d6bb2a-9cd5-f2db-d0f3-ab07bf97c348" [ 1784.917073] env[63379]: _type = "Task" [ 1784.917073] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1784.925652] env[63379]: DEBUG oslo_vmware.api [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d6bb2a-9cd5-f2db-d0f3-ab07bf97c348, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1785.019270] env[63379]: DEBUG oslo_vmware.api [None req-21a8507d-7301-4724-be89-15e19551cf39 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780038, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.204328} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1785.019692] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-21a8507d-7301-4724-be89-15e19551cf39 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1785.019913] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-21a8507d-7301-4724-be89-15e19551cf39 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1785.020133] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-21a8507d-7301-4724-be89-15e19551cf39 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1785.020296] env[63379]: INFO nova.compute.manager [None req-21a8507d-7301-4724-be89-15e19551cf39 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: eda684fa-1595-4985-beb7-c298049411bf] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1785.020540] env[63379]: DEBUG oslo.service.loopingcall [None req-21a8507d-7301-4724-be89-15e19551cf39 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1785.020784] env[63379]: DEBUG nova.compute.manager [-] [instance: eda684fa-1595-4985-beb7-c298049411bf] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1785.020850] env[63379]: DEBUG nova.network.neutron [-] [instance: eda684fa-1595-4985-beb7-c298049411bf] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1785.107857] env[63379]: DEBUG nova.scheduler.client.report [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Updated inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 with generation 119 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1785.108160] env[63379]: DEBUG nova.compute.provider_tree [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Updating resource provider cf478c89-515f-4372-b90f-4868ab56e978 generation from 119 to 120 during operation: update_inventory {{(pid=63379) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1785.108346] env[63379]: DEBUG nova.compute.provider_tree [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1785.338156] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1785.352200] env[63379]: DEBUG oslo_vmware.api [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780033, 'name': PowerOnVM_Task, 'duration_secs': 0.563416} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1785.352482] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1785.352690] env[63379]: INFO nova.compute.manager [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Took 8.17 seconds to spawn the instance on the hypervisor. [ 1785.352874] env[63379]: DEBUG nova.compute.manager [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1785.353669] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc3f1179-6076-4536-97a0-2e846c04295b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.397720] env[63379]: DEBUG nova.network.neutron [req-e4b451c3-5794-4539-ba91-d86fa00bc026 req-8da22043-ed5c-4352-9ee7-a4f13dfe46f7 service nova] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Updated VIF entry in instance network info cache for port 844ccd54-2ca6-4d56-a26f-eea2683c760c. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1785.398106] env[63379]: DEBUG nova.network.neutron [req-e4b451c3-5794-4539-ba91-d86fa00bc026 req-8da22043-ed5c-4352-9ee7-a4f13dfe46f7 service nova] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Updating instance_info_cache with network_info: [{"id": "844ccd54-2ca6-4d56-a26f-eea2683c760c", "address": "fa:16:3e:a1:30:f4", "network": {"id": "55f3848c-4d4f-4c83-a3e6-bc7a6f7af3ce", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.254", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eb95d75934bc4912a35f709406a98a65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap844ccd54-2c", "ovs_interfaceid": "844ccd54-2ca6-4d56-a26f-eea2683c760c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1785.427354] env[63379]: DEBUG oslo_vmware.api [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d6bb2a-9cd5-f2db-d0f3-ab07bf97c348, 
'name': SearchDatastore_Task, 'duration_secs': 0.014987} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1785.427665] env[63379]: DEBUG oslo_concurrency.lockutils [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1785.427902] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1785.428154] env[63379]: DEBUG oslo_concurrency.lockutils [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1785.428308] env[63379]: DEBUG oslo_concurrency.lockutils [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1785.428541] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1785.429226] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-39d3f95e-e537-4090-bbfd-2f7c76490b93 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.442152] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1785.442354] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1785.443125] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24168af5-c6f9-48eb-8e7f-ed9ace7540f6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.448585] env[63379]: DEBUG oslo_vmware.api [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Waiting for the task: (returnval){ [ 1785.448585] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f0993b-5fb6-c431-5b3c-d9b215a2da5b" [ 1785.448585] env[63379]: _type = "Task" [ 1785.448585] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1785.456166] env[63379]: DEBUG oslo_vmware.api [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f0993b-5fb6-c431-5b3c-d9b215a2da5b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1785.613855] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.740s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1785.616926] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.940s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1785.617909] env[63379]: INFO nova.compute.claims [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1785.632364] env[63379]: INFO nova.scheduler.client.report [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Deleted allocations for instance b9bc2562-9475-400e-9cf9-646b8f4c8cf2 [ 1785.871017] env[63379]: INFO nova.compute.manager [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Took 20.14 seconds to build instance. 
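[editor's annotation, not part of the captured log] The req-8c03bc56 entries above show placement's optimistic concurrency control: the inventory update for resource provider cf478c89-515f-4372-b90f-4868ab56e978 is first rejected with 409 "placement.concurrent_update" because the client's cached provider generation is stale, the report client then refreshes inventories, aggregates and traits, and the re-submitted inventory succeeds, moving the provider generation from 119 to 120. The sketch below is only a minimal illustration of that generation-check-and-retry pattern under assumed helpers (PlacementConflict, get_provider, put_inventory are hypothetical stand-ins); it is not the nova.scheduler.client.report implementation.

# Illustrative sketch only: generation-based optimistic concurrency against a
# placement-like inventory API. All names here (PlacementConflict, get_provider,
# put_inventory) are hypothetical stand-ins, not the Nova report client.

class PlacementConflict(Exception):
    """Raised when the server answers 409 placement.concurrent_update."""


def set_inventory_with_retry(get_provider, put_inventory, provider_uuid,
                             inventory, max_attempts=4):
    """PUT an inventory, refreshing the provider generation on 409 conflicts.

    get_provider(uuid) -> dict carrying the provider's current 'generation'.
    put_inventory(uuid, generation, inventory) -> new generation, or raises
    PlacementConflict when another writer bumped the generation first.
    """
    generation = get_provider(provider_uuid)['generation']
    for attempt in range(1, max_attempts + 1):
        try:
            # The server applies the write only if our generation is current.
            return put_inventory(provider_uuid, generation, inventory)
        except PlacementConflict:
            if attempt == max_attempts:
                raise
            # Another writer updated the provider (as in the 409 above);
            # re-read to pick up the new generation, then retry.
            generation = get_provider(provider_uuid)['generation']

In the log this same cycle is visible as the failed PUT, the refresh of the provider's associations, and the subsequent successful update recorded as "generation from 119 to 120". [end of annotation]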
[ 1785.900363] env[63379]: DEBUG oslo_concurrency.lockutils [req-e4b451c3-5794-4539-ba91-d86fa00bc026 req-8da22043-ed5c-4352-9ee7-a4f13dfe46f7 service nova] Releasing lock "refresh_cache-cb62192b-63db-40d0-97bb-1df171ade64b" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1785.900492] env[63379]: DEBUG nova.compute.manager [req-e4b451c3-5794-4539-ba91-d86fa00bc026 req-8da22043-ed5c-4352-9ee7-a4f13dfe46f7 service nova] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Received event network-vif-deleted-7de015d0-7fb7-43e2-87cb-c9cafb6e4e26 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1785.900666] env[63379]: DEBUG nova.compute.manager [req-e4b451c3-5794-4539-ba91-d86fa00bc026 req-8da22043-ed5c-4352-9ee7-a4f13dfe46f7 service nova] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Received event network-vif-deleted-1e94cb02-aa72-4461-85e0-63ff98d54c0e {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1785.921882] env[63379]: DEBUG nova.network.neutron [-] [instance: eda684fa-1595-4985-beb7-c298049411bf] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1785.959201] env[63379]: DEBUG oslo_vmware.api [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f0993b-5fb6-c431-5b3c-d9b215a2da5b, 'name': SearchDatastore_Task, 'duration_secs': 0.029676} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1785.959961] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17441ee4-3978-4d68-bf30-2b76fe67a793 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.965007] env[63379]: DEBUG oslo_vmware.api [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Waiting for the task: (returnval){ [ 1785.965007] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5200a2df-8b94-ce0f-0ff6-a68b4261a513" [ 1785.965007] env[63379]: _type = "Task" [ 1785.965007] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1785.972409] env[63379]: DEBUG oslo_vmware.api [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5200a2df-8b94-ce0f-0ff6-a68b4261a513, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1786.140632] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8c03bc56-5784-4a81-bd22-da0770f66795 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Lock "b9bc2562-9475-400e-9cf9-646b8f4c8cf2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.625s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1786.373417] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1ffac7ae-3ed6-40aa-be3c-fe4e38a6d022 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "9040201c-e1de-47d9-b9c2-b30c14e32749" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.650s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1786.425083] env[63379]: INFO nova.compute.manager [-] [instance: eda684fa-1595-4985-beb7-c298049411bf] Took 1.40 seconds to deallocate network for instance. [ 1786.476239] env[63379]: DEBUG oslo_vmware.api [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5200a2df-8b94-ce0f-0ff6-a68b4261a513, 'name': SearchDatastore_Task, 'duration_secs': 0.008784} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1786.476618] env[63379]: DEBUG oslo_concurrency.lockutils [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1786.476974] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] cb62192b-63db-40d0-97bb-1df171ade64b/cb62192b-63db-40d0-97bb-1df171ade64b.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1786.477262] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-594cd596-b35d-444e-b232-ef916a34c0ad {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.484464] env[63379]: DEBUG oslo_vmware.api [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Waiting for the task: (returnval){ [ 1786.484464] env[63379]: value = "task-1780039" [ 1786.484464] env[63379]: _type = "Task" [ 1786.484464] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1786.492173] env[63379]: DEBUG oslo_vmware.api [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Task: {'id': task-1780039, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1786.520239] env[63379]: DEBUG nova.compute.manager [req-a5d65662-73fc-43f3-9706-24f4f382392d req-6d9725d1-00d8-4b97-af77-790b86127cf5 service nova] [instance: eda684fa-1595-4985-beb7-c298049411bf] Received event network-vif-deleted-82d4cc07-9772-4f7e-87ba-1ef653e88fd3 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1786.757032] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ae55e565-bd8a-4779-b315-47764b50cf13 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Acquiring lock "c900bb90-b4a8-40a2-9436-5a0ced1dd919" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1786.757177] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ae55e565-bd8a-4779-b315-47764b50cf13 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Lock "c900bb90-b4a8-40a2-9436-5a0ced1dd919" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1786.757397] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ae55e565-bd8a-4779-b315-47764b50cf13 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Acquiring lock "c900bb90-b4a8-40a2-9436-5a0ced1dd919-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1786.757582] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ae55e565-bd8a-4779-b315-47764b50cf13 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Lock "c900bb90-b4a8-40a2-9436-5a0ced1dd919-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1786.757793] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ae55e565-bd8a-4779-b315-47764b50cf13 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Lock "c900bb90-b4a8-40a2-9436-5a0ced1dd919-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1786.760298] env[63379]: INFO nova.compute.manager [None req-ae55e565-bd8a-4779-b315-47764b50cf13 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Terminating instance [ 1786.762232] env[63379]: DEBUG nova.compute.manager [None 
req-ae55e565-bd8a-4779-b315-47764b50cf13 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1786.762369] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-ae55e565-bd8a-4779-b315-47764b50cf13 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1786.763182] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e26f924d-2e6a-425f-bfc0-ec7e79995eb4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.773420] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae55e565-bd8a-4779-b315-47764b50cf13 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1786.774115] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-31dfd726-3c07-40df-8a09-59ce810c05bd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.781944] env[63379]: DEBUG oslo_vmware.api [None req-ae55e565-bd8a-4779-b315-47764b50cf13 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Waiting for the task: (returnval){ [ 1786.781944] env[63379]: value = "task-1780040" [ 1786.781944] env[63379]: _type = "Task" [ 1786.781944] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1786.790334] env[63379]: DEBUG oslo_vmware.api [None req-ae55e565-bd8a-4779-b315-47764b50cf13 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1780040, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1786.871568] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da2ae360-65a7-4ac9-a1b0-42ad03a1b5a5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.879548] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5f99c04-d892-4ccc-b1cb-8b40de018405 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.914150] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6942f57-65f3-4518-954c-46cb47a91433 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.917970] env[63379]: DEBUG oslo_concurrency.lockutils [None req-158fc0fe-fa0e-41b3-9d70-d5e19e8ddf20 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "9040201c-e1de-47d9-b9c2-b30c14e32749" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1786.918202] env[63379]: DEBUG oslo_concurrency.lockutils [None req-158fc0fe-fa0e-41b3-9d70-d5e19e8ddf20 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "9040201c-e1de-47d9-b9c2-b30c14e32749" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1786.918416] env[63379]: DEBUG nova.compute.manager [None req-158fc0fe-fa0e-41b3-9d70-d5e19e8ddf20 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1786.919154] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75a5acc1-599f-4cee-b0b3-615d6ea482a8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.927994] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1cb0c7a-8f63-41ab-88ab-2c6e80aec823 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.931823] env[63379]: DEBUG nova.compute.manager [None req-158fc0fe-fa0e-41b3-9d70-d5e19e8ddf20 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63379) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1786.932414] env[63379]: DEBUG nova.objects.instance [None req-158fc0fe-fa0e-41b3-9d70-d5e19e8ddf20 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lazy-loading 'flavor' on Instance uuid 9040201c-e1de-47d9-b9c2-b30c14e32749 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1786.934433] env[63379]: DEBUG 
oslo_concurrency.lockutils [None req-21a8507d-7301-4724-be89-15e19551cf39 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1786.947810] env[63379]: DEBUG nova.compute.provider_tree [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1786.995802] env[63379]: DEBUG oslo_vmware.api [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Task: {'id': task-1780039, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.505191} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1786.996138] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] cb62192b-63db-40d0-97bb-1df171ade64b/cb62192b-63db-40d0-97bb-1df171ade64b.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1786.996367] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1786.996618] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-35b35062-f85a-4d15-a27a-8872b788cac9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.002274] env[63379]: DEBUG oslo_vmware.api [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Waiting for the task: (returnval){ [ 1787.002274] env[63379]: value = "task-1780041" [ 1787.002274] env[63379]: _type = "Task" [ 1787.002274] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1787.009784] env[63379]: DEBUG oslo_vmware.api [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Task: {'id': task-1780041, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.292696] env[63379]: DEBUG oslo_vmware.api [None req-ae55e565-bd8a-4779-b315-47764b50cf13 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1780040, 'name': PowerOffVM_Task, 'duration_secs': 0.393111} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1787.293067] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae55e565-bd8a-4779-b315-47764b50cf13 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1787.293294] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-ae55e565-bd8a-4779-b315-47764b50cf13 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1787.293581] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ec9db86b-10f5-4bcf-9032-f31b803d7e85 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.388587] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-ae55e565-bd8a-4779-b315-47764b50cf13 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1787.388828] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-ae55e565-bd8a-4779-b315-47764b50cf13 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1787.389073] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae55e565-bd8a-4779-b315-47764b50cf13 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Deleting the datastore file [datastore1] c900bb90-b4a8-40a2-9436-5a0ced1dd919 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1787.389427] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-af4b2b2d-0bd9-407c-a00a-4401b229a083 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.397363] env[63379]: DEBUG oslo_vmware.api [None req-ae55e565-bd8a-4779-b315-47764b50cf13 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Waiting for the task: (returnval){ [ 1787.397363] env[63379]: value = "task-1780043" [ 1787.397363] env[63379]: _type = "Task" [ 1787.397363] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1787.404933] env[63379]: DEBUG oslo_vmware.api [None req-ae55e565-bd8a-4779-b315-47764b50cf13 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1780043, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.437774] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-158fc0fe-fa0e-41b3-9d70-d5e19e8ddf20 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1787.438142] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6b26d8d7-dab9-4b9a-980c-f8bc2e681e1d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.445566] env[63379]: DEBUG oslo_vmware.api [None req-158fc0fe-fa0e-41b3-9d70-d5e19e8ddf20 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1787.445566] env[63379]: value = "task-1780044" [ 1787.445566] env[63379]: _type = "Task" [ 1787.445566] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1787.457312] env[63379]: DEBUG oslo_vmware.api [None req-158fc0fe-fa0e-41b3-9d70-d5e19e8ddf20 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780044, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.480994] env[63379]: DEBUG nova.scheduler.client.report [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Updated inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 with generation 120 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1787.481334] env[63379]: DEBUG nova.compute.provider_tree [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Updating resource provider cf478c89-515f-4372-b90f-4868ab56e978 generation from 120 to 121 during operation: update_inventory {{(pid=63379) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1787.481525] env[63379]: DEBUG nova.compute.provider_tree [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1787.512455] env[63379]: DEBUG oslo_vmware.api [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Task: {'id': task-1780041, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064685} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1787.512712] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1787.513507] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8f6a7e6-3536-42d1-bebd-871426b7d767 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.536349] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] cb62192b-63db-40d0-97bb-1df171ade64b/cb62192b-63db-40d0-97bb-1df171ade64b.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1787.536987] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-60a59042-b5ee-4e0e-9737-bf2a6a94bb61 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.556881] env[63379]: DEBUG oslo_vmware.api [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Waiting for the task: (returnval){ [ 1787.556881] env[63379]: value = "task-1780045" [ 1787.556881] env[63379]: _type = "Task" [ 1787.556881] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1787.565207] env[63379]: DEBUG oslo_vmware.api [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Task: {'id': task-1780045, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.906934] env[63379]: DEBUG oslo_vmware.api [None req-ae55e565-bd8a-4779-b315-47764b50cf13 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Task: {'id': task-1780043, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.244422} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1787.907231] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae55e565-bd8a-4779-b315-47764b50cf13 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1787.907439] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-ae55e565-bd8a-4779-b315-47764b50cf13 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1787.907625] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-ae55e565-bd8a-4779-b315-47764b50cf13 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1787.907820] env[63379]: INFO nova.compute.manager [None req-ae55e565-bd8a-4779-b315-47764b50cf13 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1787.908098] env[63379]: DEBUG oslo.service.loopingcall [None req-ae55e565-bd8a-4779-b315-47764b50cf13 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1787.908310] env[63379]: DEBUG nova.compute.manager [-] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1787.908484] env[63379]: DEBUG nova.network.neutron [-] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1787.956595] env[63379]: DEBUG oslo_vmware.api [None req-158fc0fe-fa0e-41b3-9d70-d5e19e8ddf20 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780044, 'name': PowerOffVM_Task, 'duration_secs': 0.209554} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1787.956922] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-158fc0fe-fa0e-41b3-9d70-d5e19e8ddf20 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1787.957136] env[63379]: DEBUG nova.compute.manager [None req-158fc0fe-fa0e-41b3-9d70-d5e19e8ddf20 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1787.957960] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cce0db9-c054-44f1-b462-097dfedcdc3b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.986952] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.371s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1787.987517] env[63379]: DEBUG nova.compute.manager [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1787.990145] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 4.554s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1787.990348] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1787.990526] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63379) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1787.990839] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8a904183-11cd-4401-a36b-3d6def78d9a0 tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.400s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1787.991070] env[63379]: DEBUG nova.objects.instance [None req-8a904183-11cd-4401-a36b-3d6def78d9a0 tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Lazy-loading 'resources' on Instance uuid 2254844f-b1f9-435e-ac8a-f114f05331e1 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1787.992567] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8023eb2-314e-41df-9f0c-7fa0fc2770c2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.003931] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd73bc3c-8e06-410c-8bf4-408b55a7e85c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.019281] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f986a6e9-95d9-421d-a923-e722437f2956 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.026224] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d6b2696-4244-4e31-ab1b-255470bcb5c1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.056012] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179481MB free_disk=163GB free_vcpus=48 pci_devices=None {{(pid=63379) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1788.056174] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock 
"compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1788.065727] env[63379]: DEBUG oslo_vmware.api [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Task: {'id': task-1780045, 'name': ReconfigVM_Task, 'duration_secs': 0.350796} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1788.066010] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Reconfigured VM instance instance-0000005c to attach disk [datastore1] cb62192b-63db-40d0-97bb-1df171ade64b/cb62192b-63db-40d0-97bb-1df171ade64b.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1788.066620] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-40d83884-f878-466c-b3ce-d1fc9f5e1ddc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.072801] env[63379]: DEBUG oslo_vmware.api [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Waiting for the task: (returnval){ [ 1788.072801] env[63379]: value = "task-1780046" [ 1788.072801] env[63379]: _type = "Task" [ 1788.072801] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1788.080457] env[63379]: DEBUG oslo_vmware.api [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Task: {'id': task-1780046, 'name': Rename_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.470496] env[63379]: DEBUG oslo_concurrency.lockutils [None req-158fc0fe-fa0e-41b3-9d70-d5e19e8ddf20 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "9040201c-e1de-47d9-b9c2-b30c14e32749" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.552s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1788.503032] env[63379]: DEBUG nova.compute.utils [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1788.505382] env[63379]: DEBUG nova.compute.manager [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1788.505382] env[63379]: DEBUG nova.network.neutron [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1788.565517] env[63379]: DEBUG nova.policy [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5cbf26808a73470898829b58491e7c6f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'edb0d4b37a67492f9e0275b341e80cc2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1788.575233] env[63379]: DEBUG nova.compute.manager [req-433e8f0a-3a5b-46bf-b011-40a70c259d0f req-e30f7df8-2302-4d46-a8dc-f5a3677ca2a0 service nova] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Received event network-vif-deleted-1f122953-4fde-41ae-9895-0ef67cacb236 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1788.575435] env[63379]: INFO nova.compute.manager [req-433e8f0a-3a5b-46bf-b011-40a70c259d0f req-e30f7df8-2302-4d46-a8dc-f5a3677ca2a0 service nova] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Neutron deleted interface 1f122953-4fde-41ae-9895-0ef67cacb236; detaching it from the instance and deleting it from the info cache [ 1788.575608] env[63379]: DEBUG nova.network.neutron [req-433e8f0a-3a5b-46bf-b011-40a70c259d0f req-e30f7df8-2302-4d46-a8dc-f5a3677ca2a0 service nova] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1788.591222] env[63379]: DEBUG oslo_vmware.api [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Task: {'id': task-1780046, 'name': Rename_Task, 'duration_secs': 0.145515} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1788.591222] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1788.591222] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3f2df3c1-bed4-4fac-95cb-ce79af29381a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.597579] env[63379]: DEBUG oslo_vmware.api [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Waiting for the task: (returnval){ [ 1788.597579] env[63379]: value = "task-1780047" [ 1788.597579] env[63379]: _type = "Task" [ 1788.597579] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1788.609898] env[63379]: DEBUG oslo_vmware.api [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Task: {'id': task-1780047, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.629259] env[63379]: DEBUG nova.network.neutron [-] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1788.752917] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b13ef4d-4afe-4c26-a28d-2df45db97fd6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.764443] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6ab29e1-4e95-489a-bb52-5922f7cd7fd9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.799040] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87d13972-6480-4808-851b-77cc6533ea23 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.807259] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c97dea70-716b-47c8-a19a-f5bb39214436 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.821925] env[63379]: DEBUG nova.compute.provider_tree [None req-8a904183-11cd-4401-a36b-3d6def78d9a0 tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1788.879462] env[63379]: DEBUG nova.network.neutron [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 
8078bac6-146a-4e3a-a7a7-7093f617a330] Successfully created port: c2313903-6e4e-42f8-be0f-3c00be1c0fec {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1789.008256] env[63379]: DEBUG nova.compute.manager [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1789.082498] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6ca753dd-0eca-485e-8286-91aeb283d60f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.091020] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e18d78d-1f07-4213-a87a-82faba471193 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.111909] env[63379]: DEBUG oslo_vmware.api [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Task: {'id': task-1780047, 'name': PowerOnVM_Task, 'duration_secs': 0.441512} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1789.112245] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1789.112450] env[63379]: INFO nova.compute.manager [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Took 7.62 seconds to spawn the instance on the hypervisor. [ 1789.112631] env[63379]: DEBUG nova.compute.manager [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1789.113403] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aea0273-9b20-4a3f-80b8-47523d8f0933 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.125746] env[63379]: DEBUG nova.compute.manager [req-433e8f0a-3a5b-46bf-b011-40a70c259d0f req-e30f7df8-2302-4d46-a8dc-f5a3677ca2a0 service nova] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Detach interface failed, port_id=1f122953-4fde-41ae-9895-0ef67cacb236, reason: Instance c900bb90-b4a8-40a2-9436-5a0ced1dd919 could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 1789.132683] env[63379]: INFO nova.compute.manager [-] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Took 1.22 seconds to deallocate network for instance. 
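Aside (not part of the captured log): the recurring "Invoking <something>_Task", "Waiting for the task: (returnval){ value = task-... }", "progress is N%", and "completed successfully" lines above all come from the same oslo.vmware invoke-then-poll cycle. The sketch below only illustrates that pattern; the vCenter endpoint, credentials, and managed object reference value are placeholders, not values taken from this log.

# Illustrative sketch of the oslo.vmware invoke/poll cycle seen in the log.
# The endpoint, credentials and the moref value below are placeholders.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

session = vmware_api.VMwareAPISession(
    'vcenter.example.org', 'user', 'secret',        # placeholders
    api_retry_count=10, task_poll_interval=0.5)

# A managed object reference for the VM; Nova resolves this via property
# collector / SearchIndex calls like the ones logged above.
vm_ref = vim_util.get_moref('vm-123456', 'VirtualMachine')

# invoke_api() issues the SOAP call (the "Invoking VirtualMachine.PowerOnVM_Task"
# lines) and returns a task reference; wait_for_task() then polls it, producing
# the periodic "progress is N%" lines until the task finishes.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
task_info = session.wait_for_task(task)
print(task_info.state)    # 'success' when the task "completed successfully"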
[ 1789.324464] env[63379]: DEBUG nova.scheduler.client.report [None req-8a904183-11cd-4401-a36b-3d6def78d9a0 tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1789.383258] env[63379]: INFO nova.compute.manager [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Rebuilding instance [ 1789.428779] env[63379]: DEBUG nova.compute.manager [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1789.429699] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f53aec9-1229-48e4-9d71-727ab123f208 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.480653] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9955b926-379c-41b0-847a-52723bcc9049 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Acquiring lock "1d76a28f-822d-4b4f-be2f-2ad3371b3979" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1789.480653] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9955b926-379c-41b0-847a-52723bcc9049 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lock "1d76a28f-822d-4b4f-be2f-2ad3371b3979" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1789.639140] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ae55e565-bd8a-4779-b315-47764b50cf13 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1789.640628] env[63379]: INFO nova.compute.manager [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Took 19.46 seconds to build instance. 
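Aside (not part of the captured log): the inventory payloads that keep appearing above (VCPU, MEMORY_MB, DISK_GB) are interpreted by Placement as schedulable capacity of (total - reserved) * allocation_ratio per resource class, with max_unit capping what a single allocation may request. A small illustrative calculation, using the exact figures logged for provider cf478c89-515f-4372-b90f-4868ab56e978:

# Simplified reading of the inventory dicts logged above; the authoritative
# accounting happens in the Placement service, not in this snippet.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'max_unit': 16,    'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'max_unit': 163,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: capacity={capacity:.0f}, per-allocation cap={inv['max_unit']}")

# VCPU: capacity=192, per-allocation cap=16
# MEMORY_MB: capacity=196078, per-allocation cap=65530
# DISK_GB: capacity=400, per-allocation cap=163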
[ 1789.829193] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8a904183-11cd-4401-a36b-3d6def78d9a0 tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.838s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1789.831699] env[63379]: DEBUG oslo_concurrency.lockutils [None req-52687a0a-c68c-4546-9dc4-ed6c4a4ea79d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.920s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1789.831948] env[63379]: DEBUG nova.objects.instance [None req-52687a0a-c68c-4546-9dc4-ed6c4a4ea79d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lazy-loading 'resources' on Instance uuid 0f248290-a14c-4c76-98b3-4efa5bda5f05 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1789.848543] env[63379]: INFO nova.scheduler.client.report [None req-8a904183-11cd-4401-a36b-3d6def78d9a0 tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Deleted allocations for instance 2254844f-b1f9-435e-ac8a-f114f05331e1 [ 1789.942222] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1789.942555] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c9afb7a7-c67b-4872-a27f-8941b84ab868 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.952925] env[63379]: DEBUG oslo_vmware.api [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1789.952925] env[63379]: value = "task-1780048" [ 1789.952925] env[63379]: _type = "Task" [ 1789.952925] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.966387] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] VM already powered off {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1789.966774] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1789.967973] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e72de0d2-5af7-44cd-a108-d53feed587df {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.978993] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1789.979417] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5768cf07-6a1a-49b3-ad85-b3b0f5f433b4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.984035] env[63379]: INFO nova.compute.manager [None req-9955b926-379c-41b0-847a-52723bcc9049 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Detaching volume 3d2e44ab-54ce-4ed3-b05f-eda61e23e1ef [ 1790.022900] env[63379]: DEBUG nova.compute.manager [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1790.027467] env[63379]: INFO nova.virt.block_device [None req-9955b926-379c-41b0-847a-52723bcc9049 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Attempting to driver detach volume 3d2e44ab-54ce-4ed3-b05f-eda61e23e1ef from mountpoint /dev/sdb [ 1790.027863] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-9955b926-379c-41b0-847a-52723bcc9049 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Volume detach. 
Driver type: vmdk {{(pid=63379) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1790.028258] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-9955b926-379c-41b0-847a-52723bcc9049 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369451', 'volume_id': '3d2e44ab-54ce-4ed3-b05f-eda61e23e1ef', 'name': 'volume-3d2e44ab-54ce-4ed3-b05f-eda61e23e1ef', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '1d76a28f-822d-4b4f-be2f-2ad3371b3979', 'attached_at': '', 'detached_at': '', 'volume_id': '3d2e44ab-54ce-4ed3-b05f-eda61e23e1ef', 'serial': '3d2e44ab-54ce-4ed3-b05f-eda61e23e1ef'} {{(pid=63379) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1790.029986] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3354dedc-63ba-4d9b-89e6-bc9b31bbd2d0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.065832] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3c93b67e-04ec-42bc-8892-c93f80a0afdd tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquiring lock "19a41941-0679-4971-8a44-c95b13f5c294" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1790.066254] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3c93b67e-04ec-42bc-8892-c93f80a0afdd tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Lock "19a41941-0679-4971-8a44-c95b13f5c294" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1790.072960] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30cc6e25-9d68-4b66-abcb-9b57d04506db {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.087236] env[63379]: DEBUG nova.virt.hardware [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1790.087653] env[63379]: DEBUG nova.virt.hardware 
[None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1790.087960] env[63379]: DEBUG nova.virt.hardware [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1790.088317] env[63379]: DEBUG nova.virt.hardware [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1790.089159] env[63379]: DEBUG nova.virt.hardware [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1790.089464] env[63379]: DEBUG nova.virt.hardware [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1790.089843] env[63379]: DEBUG nova.virt.hardware [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1790.090157] env[63379]: DEBUG nova.virt.hardware [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1790.090483] env[63379]: DEBUG nova.virt.hardware [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1790.090774] env[63379]: DEBUG nova.virt.hardware [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1790.091107] env[63379]: DEBUG nova.virt.hardware [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1790.093120] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dd12e3d-798d-45c3-b7f3-f2ca6891ef0a {{(pid=63379) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.098205] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d63e066a-e474-490d-88dc-f70b22347324 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.105998] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1790.106386] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1790.106707] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Deleting the datastore file [datastore1] 9040201c-e1de-47d9-b9c2-b30c14e32749 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1790.110737] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d7698439-492a-4c9f-b8e8-1316b109d702 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.144226] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa5eb69c-4541-420f-97ab-c2dacc506cd3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.151699] env[63379]: DEBUG oslo_concurrency.lockutils [None req-156c2418-58c3-42e1-ad0d-50d29b965e06 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Lock "cb62192b-63db-40d0-97bb-1df171ade64b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.980s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1790.153460] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed94f55e-a03e-4b1a-9cca-68dad5efe4d4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.159662] env[63379]: DEBUG oslo_vmware.api [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1790.159662] env[63379]: value = "task-1780050" [ 1790.159662] env[63379]: _type = "Task" [ 1790.159662] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.196440] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7dd0b351-e4c9-49f1-a41f-241bd39033bc tempest-DeleteServersAdminTestJSON-1737462030 tempest-DeleteServersAdminTestJSON-1737462030-project-admin] Acquiring lock "cb62192b-63db-40d0-97bb-1df171ade64b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1790.196829] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7dd0b351-e4c9-49f1-a41f-241bd39033bc tempest-DeleteServersAdminTestJSON-1737462030 tempest-DeleteServersAdminTestJSON-1737462030-project-admin] Lock "cb62192b-63db-40d0-97bb-1df171ade64b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1790.197442] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7dd0b351-e4c9-49f1-a41f-241bd39033bc tempest-DeleteServersAdminTestJSON-1737462030 tempest-DeleteServersAdminTestJSON-1737462030-project-admin] Acquiring lock "cb62192b-63db-40d0-97bb-1df171ade64b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1790.197442] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7dd0b351-e4c9-49f1-a41f-241bd39033bc tempest-DeleteServersAdminTestJSON-1737462030 tempest-DeleteServersAdminTestJSON-1737462030-project-admin] Lock "cb62192b-63db-40d0-97bb-1df171ade64b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1790.197652] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7dd0b351-e4c9-49f1-a41f-241bd39033bc tempest-DeleteServersAdminTestJSON-1737462030 tempest-DeleteServersAdminTestJSON-1737462030-project-admin] Lock "cb62192b-63db-40d0-97bb-1df171ade64b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1790.200393] env[63379]: INFO nova.compute.manager [None req-7dd0b351-e4c9-49f1-a41f-241bd39033bc tempest-DeleteServersAdminTestJSON-1737462030 tempest-DeleteServersAdminTestJSON-1737462030-project-admin] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Terminating instance [ 1790.203018] env[63379]: DEBUG nova.compute.manager [None req-7dd0b351-e4c9-49f1-a41f-241bd39033bc tempest-DeleteServersAdminTestJSON-1737462030 tempest-DeleteServersAdminTestJSON-1737462030-project-admin] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1790.203361] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-7dd0b351-e4c9-49f1-a41f-241bd39033bc tempest-DeleteServersAdminTestJSON-1737462030 tempest-DeleteServersAdminTestJSON-1737462030-project-admin] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1790.204286] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-9955b926-379c-41b0-847a-52723bcc9049 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] The volume has not been displaced from its original location: [datastore1] volume-3d2e44ab-54ce-4ed3-b05f-eda61e23e1ef/volume-3d2e44ab-54ce-4ed3-b05f-eda61e23e1ef.vmdk. No consolidation needed. {{(pid=63379) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1790.210346] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-9955b926-379c-41b0-847a-52723bcc9049 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Reconfiguring VM instance instance-00000034 to detach disk 2001 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1790.212040] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eb846d1-cf7a-475c-aadb-dd77c7cd9402 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.214899] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5e437c9e-7e06-4385-9032-c40380e6abe8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.231809] env[63379]: DEBUG oslo_vmware.api [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780050, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.236694] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-7dd0b351-e4c9-49f1-a41f-241bd39033bc tempest-DeleteServersAdminTestJSON-1737462030 tempest-DeleteServersAdminTestJSON-1737462030-project-admin] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1790.237995] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-957351ae-f5d7-40f8-a312-1db1f8b7d9a8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.239699] env[63379]: DEBUG oslo_vmware.api [None req-9955b926-379c-41b0-847a-52723bcc9049 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Waiting for the task: (returnval){ [ 1790.239699] env[63379]: value = "task-1780051" [ 1790.239699] env[63379]: _type = "Task" [ 1790.239699] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.244119] env[63379]: DEBUG oslo_vmware.api [None req-7dd0b351-e4c9-49f1-a41f-241bd39033bc tempest-DeleteServersAdminTestJSON-1737462030 tempest-DeleteServersAdminTestJSON-1737462030-project-admin] Waiting for the task: (returnval){ [ 1790.244119] env[63379]: value = "task-1780052" [ 1790.244119] env[63379]: _type = "Task" [ 1790.244119] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.250770] env[63379]: DEBUG oslo_vmware.api [None req-9955b926-379c-41b0-847a-52723bcc9049 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1780051, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.257079] env[63379]: DEBUG oslo_vmware.api [None req-7dd0b351-e4c9-49f1-a41f-241bd39033bc tempest-DeleteServersAdminTestJSON-1737462030 tempest-DeleteServersAdminTestJSON-1737462030-project-admin] Task: {'id': task-1780052, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.358868] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8a904183-11cd-4401-a36b-3d6def78d9a0 tempest-ServersNegativeTestMultiTenantJSON-2135592504 tempest-ServersNegativeTestMultiTenantJSON-2135592504-project-member] Lock "2254844f-b1f9-435e-ac8a-f114f05331e1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.662s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1790.570183] env[63379]: INFO nova.compute.manager [None req-3c93b67e-04ec-42bc-8892-c93f80a0afdd tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Detaching volume 9d889203-dc27-4007-a5c2-f62dd5709f2f [ 1790.573673] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53a3f758-2bce-4621-8cad-eca4aa0c0e7f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.584888] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bc5ab1a-d69d-457b-892a-577bffbdb063 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.618328] env[63379]: INFO nova.virt.block_device [None req-3c93b67e-04ec-42bc-8892-c93f80a0afdd tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Attempting to driver detach volume 9d889203-dc27-4007-a5c2-f62dd5709f2f from mountpoint /dev/sdb [ 1790.618599] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c93b67e-04ec-42bc-8892-c93f80a0afdd tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Volume detach. 
Driver type: vmdk {{(pid=63379) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1790.618798] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c93b67e-04ec-42bc-8892-c93f80a0afdd tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369441', 'volume_id': '9d889203-dc27-4007-a5c2-f62dd5709f2f', 'name': 'volume-9d889203-dc27-4007-a5c2-f62dd5709f2f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '19a41941-0679-4971-8a44-c95b13f5c294', 'attached_at': '', 'detached_at': '', 'volume_id': '9d889203-dc27-4007-a5c2-f62dd5709f2f', 'serial': '9d889203-dc27-4007-a5c2-f62dd5709f2f'} {{(pid=63379) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1790.619622] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f236b0d9-b2e4-43c2-a1d8-d44f60e8b995 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.622677] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-054ec2b7-5b10-4225-8284-39becdb8ecab {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.651723] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c10d28d-aee6-4267-866c-3037f419e305 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.660329] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-857e2130-1850-4282-aa93-8874fbd77fa5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.682221] env[63379]: DEBUG nova.compute.provider_tree [None req-52687a0a-c68c-4546-9dc4-ed6c4a4ea79d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1790.686722] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-352c77ff-ab67-4c12-8556-76e992e1d785 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.692441] env[63379]: DEBUG oslo_vmware.api [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780050, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.211285} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.693760] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1790.694060] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1790.694308] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1790.714323] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-299c32bc-5e47-443c-8283-88138c981e99 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.730863] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c93b67e-04ec-42bc-8892-c93f80a0afdd tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] The volume has not been displaced from its original location: [datastore1] volume-9d889203-dc27-4007-a5c2-f62dd5709f2f/volume-9d889203-dc27-4007-a5c2-f62dd5709f2f.vmdk. No consolidation needed. {{(pid=63379) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1790.737058] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c93b67e-04ec-42bc-8892-c93f80a0afdd tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Reconfiguring VM instance instance-0000003e to detach disk 2001 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1790.737441] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ca27e107-3953-4a13-9e13-7b7393f9193c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.764484] env[63379]: DEBUG oslo_vmware.api [None req-7dd0b351-e4c9-49f1-a41f-241bd39033bc tempest-DeleteServersAdminTestJSON-1737462030 tempest-DeleteServersAdminTestJSON-1737462030-project-admin] Task: {'id': task-1780052, 'name': PowerOffVM_Task, 'duration_secs': 0.240023} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.768475] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-7dd0b351-e4c9-49f1-a41f-241bd39033bc tempest-DeleteServersAdminTestJSON-1737462030 tempest-DeleteServersAdminTestJSON-1737462030-project-admin] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1790.769503] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-7dd0b351-e4c9-49f1-a41f-241bd39033bc tempest-DeleteServersAdminTestJSON-1737462030 tempest-DeleteServersAdminTestJSON-1737462030-project-admin] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1790.769865] env[63379]: DEBUG oslo_vmware.api [None req-9955b926-379c-41b0-847a-52723bcc9049 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1780051, 'name': ReconfigVM_Task, 'duration_secs': 0.278118} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.770101] env[63379]: DEBUG oslo_vmware.api [None req-3c93b67e-04ec-42bc-8892-c93f80a0afdd tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1790.770101] env[63379]: value = "task-1780053" [ 1790.770101] env[63379]: _type = "Task" [ 1790.770101] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.770305] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8a784303-a2e3-4113-bb21-ef2a4c184edb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.771864] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-9955b926-379c-41b0-847a-52723bcc9049 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Reconfigured VM instance instance-00000034 to detach disk 2001 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1790.776835] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6c68fbc9-6784-4ea1-bf15-266c8bb6b280 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.796736] env[63379]: DEBUG oslo_vmware.api [None req-3c93b67e-04ec-42bc-8892-c93f80a0afdd tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1780053, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.798218] env[63379]: DEBUG oslo_vmware.api [None req-9955b926-379c-41b0-847a-52723bcc9049 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Waiting for the task: (returnval){ [ 1790.798218] env[63379]: value = "task-1780055" [ 1790.798218] env[63379]: _type = "Task" [ 1790.798218] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.807096] env[63379]: DEBUG oslo_vmware.api [None req-9955b926-379c-41b0-847a-52723bcc9049 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1780055, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.909927] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-7dd0b351-e4c9-49f1-a41f-241bd39033bc tempest-DeleteServersAdminTestJSON-1737462030 tempest-DeleteServersAdminTestJSON-1737462030-project-admin] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1790.910186] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-7dd0b351-e4c9-49f1-a41f-241bd39033bc tempest-DeleteServersAdminTestJSON-1737462030 tempest-DeleteServersAdminTestJSON-1737462030-project-admin] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1790.910379] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-7dd0b351-e4c9-49f1-a41f-241bd39033bc tempest-DeleteServersAdminTestJSON-1737462030 tempest-DeleteServersAdminTestJSON-1737462030-project-admin] Deleting the datastore file [datastore1] cb62192b-63db-40d0-97bb-1df171ade64b {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1790.910666] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d9f6b0bd-b77d-4774-b079-97092e600bc6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.917541] env[63379]: DEBUG oslo_vmware.api [None req-7dd0b351-e4c9-49f1-a41f-241bd39033bc tempest-DeleteServersAdminTestJSON-1737462030 tempest-DeleteServersAdminTestJSON-1737462030-project-admin] Waiting for the task: (returnval){ [ 1790.917541] env[63379]: value = "task-1780056" [ 1790.917541] env[63379]: _type = "Task" [ 1790.917541] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.927117] env[63379]: DEBUG oslo_vmware.api [None req-7dd0b351-e4c9-49f1-a41f-241bd39033bc tempest-DeleteServersAdminTestJSON-1737462030 tempest-DeleteServersAdminTestJSON-1737462030-project-admin] Task: {'id': task-1780056, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.045842] env[63379]: DEBUG nova.compute.manager [req-990eff91-5ac9-4b54-be06-3f495325b2ca req-f5574b00-3784-4a23-8df4-f379de5c06ac service nova] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Received event network-vif-plugged-c2313903-6e4e-42f8-be0f-3c00be1c0fec {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1791.046227] env[63379]: DEBUG oslo_concurrency.lockutils [req-990eff91-5ac9-4b54-be06-3f495325b2ca req-f5574b00-3784-4a23-8df4-f379de5c06ac service nova] Acquiring lock "8078bac6-146a-4e3a-a7a7-7093f617a330-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1791.046582] env[63379]: DEBUG oslo_concurrency.lockutils [req-990eff91-5ac9-4b54-be06-3f495325b2ca req-f5574b00-3784-4a23-8df4-f379de5c06ac service nova] Lock "8078bac6-146a-4e3a-a7a7-7093f617a330-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1791.047041] env[63379]: DEBUG oslo_concurrency.lockutils [req-990eff91-5ac9-4b54-be06-3f495325b2ca req-f5574b00-3784-4a23-8df4-f379de5c06ac service nova] Lock "8078bac6-146a-4e3a-a7a7-7093f617a330-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1791.047399] env[63379]: DEBUG nova.compute.manager [req-990eff91-5ac9-4b54-be06-3f495325b2ca req-f5574b00-3784-4a23-8df4-f379de5c06ac service nova] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] No waiting events found dispatching network-vif-plugged-c2313903-6e4e-42f8-be0f-3c00be1c0fec {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1791.047602] env[63379]: WARNING nova.compute.manager [req-990eff91-5ac9-4b54-be06-3f495325b2ca req-f5574b00-3784-4a23-8df4-f379de5c06ac service nova] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Received unexpected event network-vif-plugged-c2313903-6e4e-42f8-be0f-3c00be1c0fec for instance with vm_state building and task_state spawning. 
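The preceding entries repeat one pattern: a vCenter task is created (PowerOffVM_Task, ReconfigVM_Task, DeleteDatastoreFile_Task, UnregisterVM), the caller logs "Waiting for the task: (returnval){ value = "task-..." }", and _poll_task reports progress percentages until the task completes with a duration_secs value. The snippet below is a minimal, self-contained sketch of that polling loop; the get_task_info callable and the 0.5-second poll interval are assumptions for illustration and do not reproduce the oslo.vmware implementation.

# Illustrative sketch of the task-polling pattern visible in the log above.
# get_task_info(task_ref) is a hypothetical callable standing in for the
# vSphere API query the real code performs; the poll interval is assumed.
import time

def wait_for_task(task_ref, get_task_info, poll_interval=0.5):
    """Poll a vCenter task until it finishes, mirroring the
    'Task: {...} progress is N%.' / 'completed successfully' entries."""
    start = time.monotonic()
    while True:
        info = get_task_info(task_ref)  # e.g. {'state': 'running', 'progress': 14}
        if info['state'] == 'success':
            duration = time.monotonic() - start
            print(f"Task {task_ref} completed successfully in {duration:.6f}s")
            return info.get('result')
        if info['state'] == 'error':
            raise RuntimeError(f"Task {task_ref} failed: {info.get('error')}")
        print(f"Task {task_ref} progress is {info.get('progress', 0)}%.")
        time.sleep(poll_interval)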
[ 1791.132782] env[63379]: DEBUG nova.network.neutron [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Successfully updated port: c2313903-6e4e-42f8-be0f-3c00be1c0fec {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1791.189349] env[63379]: DEBUG nova.scheduler.client.report [None req-52687a0a-c68c-4546-9dc4-ed6c4a4ea79d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1791.286332] env[63379]: DEBUG oslo_vmware.api [None req-3c93b67e-04ec-42bc-8892-c93f80a0afdd tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1780053, 'name': ReconfigVM_Task, 'duration_secs': 0.27869} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1791.286617] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c93b67e-04ec-42bc-8892-c93f80a0afdd tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Reconfigured VM instance instance-0000003e to detach disk 2001 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1791.291366] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-22c8ff66-6914-4495-86d0-c2ff6e53829a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.310269] env[63379]: DEBUG oslo_vmware.api [None req-9955b926-379c-41b0-847a-52723bcc9049 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1780055, 'name': ReconfigVM_Task, 'duration_secs': 0.173316} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1791.311490] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-9955b926-379c-41b0-847a-52723bcc9049 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369451', 'volume_id': '3d2e44ab-54ce-4ed3-b05f-eda61e23e1ef', 'name': 'volume-3d2e44ab-54ce-4ed3-b05f-eda61e23e1ef', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '1d76a28f-822d-4b4f-be2f-2ad3371b3979', 'attached_at': '', 'detached_at': '', 'volume_id': '3d2e44ab-54ce-4ed3-b05f-eda61e23e1ef', 'serial': '3d2e44ab-54ce-4ed3-b05f-eda61e23e1ef'} {{(pid=63379) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1791.313629] env[63379]: DEBUG oslo_vmware.api [None req-3c93b67e-04ec-42bc-8892-c93f80a0afdd tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1791.313629] env[63379]: value = "task-1780057" [ 1791.313629] env[63379]: _type = "Task" [ 1791.313629] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1791.322217] env[63379]: DEBUG oslo_vmware.api [None req-3c93b67e-04ec-42bc-8892-c93f80a0afdd tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1780057, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.426980] env[63379]: DEBUG oslo_vmware.api [None req-7dd0b351-e4c9-49f1-a41f-241bd39033bc tempest-DeleteServersAdminTestJSON-1737462030 tempest-DeleteServersAdminTestJSON-1737462030-project-admin] Task: {'id': task-1780056, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.202219} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1791.427277] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-7dd0b351-e4c9-49f1-a41f-241bd39033bc tempest-DeleteServersAdminTestJSON-1737462030 tempest-DeleteServersAdminTestJSON-1737462030-project-admin] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1791.427468] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-7dd0b351-e4c9-49f1-a41f-241bd39033bc tempest-DeleteServersAdminTestJSON-1737462030 tempest-DeleteServersAdminTestJSON-1737462030-project-admin] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1791.427651] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-7dd0b351-e4c9-49f1-a41f-241bd39033bc tempest-DeleteServersAdminTestJSON-1737462030 tempest-DeleteServersAdminTestJSON-1737462030-project-admin] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1791.427846] env[63379]: INFO nova.compute.manager [None req-7dd0b351-e4c9-49f1-a41f-241bd39033bc tempest-DeleteServersAdminTestJSON-1737462030 tempest-DeleteServersAdminTestJSON-1737462030-project-admin] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Took 1.22 seconds to destroy the instance on the hypervisor. [ 1791.428091] env[63379]: DEBUG oslo.service.loopingcall [None req-7dd0b351-e4c9-49f1-a41f-241bd39033bc tempest-DeleteServersAdminTestJSON-1737462030 tempest-DeleteServersAdminTestJSON-1737462030-project-admin] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1791.428324] env[63379]: DEBUG nova.compute.manager [-] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1791.428469] env[63379]: DEBUG nova.network.neutron [-] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1791.634529] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "refresh_cache-8078bac6-146a-4e3a-a7a7-7093f617a330" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1791.634787] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquired lock "refresh_cache-8078bac6-146a-4e3a-a7a7-7093f617a330" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1791.634985] env[63379]: DEBUG nova.network.neutron [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1791.693898] env[63379]: DEBUG oslo_concurrency.lockutils [None req-52687a0a-c68c-4546-9dc4-ed6c4a4ea79d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.862s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1791.697211] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.358s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1791.697828] env[63379]: INFO nova.compute.claims [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1791.724169] env[63379]: INFO nova.scheduler.client.report [None req-52687a0a-c68c-4546-9dc4-ed6c4a4ea79d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Deleted allocations for instance 0f248290-a14c-4c76-98b3-4efa5bda5f05 [ 1791.756184] env[63379]: DEBUG nova.virt.hardware [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1791.756435] env[63379]: DEBUG nova.virt.hardware [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1791.756590] env[63379]: DEBUG nova.virt.hardware [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1791.756770] env[63379]: DEBUG nova.virt.hardware [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1791.756956] env[63379]: DEBUG nova.virt.hardware [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1791.757129] env[63379]: DEBUG nova.virt.hardware [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1791.757341] env[63379]: DEBUG nova.virt.hardware [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1791.757545] env[63379]: DEBUG nova.virt.hardware [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1791.757660] env[63379]: DEBUG nova.virt.hardware [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1791.757821] env[63379]: DEBUG nova.virt.hardware [None 
req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1791.758069] env[63379]: DEBUG nova.virt.hardware [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1791.758892] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5d0949d-e7bc-4e7c-bb90-dfffabf7316d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.771034] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6513f680-0b8d-4f15-83a8-ecec8bd1dfb1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.786079] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0e:07:8c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '459b8c74-0aa6-42b6-996a-42b1c5d7e5c6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ec9d8d4a-76e2-4945-ac09-1e225b358218', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1791.793738] env[63379]: DEBUG oslo.service.loopingcall [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1791.794262] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1791.794483] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0862069f-e4a4-4d84-93df-20b92266fe1c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.815483] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1791.815483] env[63379]: value = "task-1780058" [ 1791.815483] env[63379]: _type = "Task" [ 1791.815483] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1791.825781] env[63379]: DEBUG oslo_vmware.api [None req-3c93b67e-04ec-42bc-8892-c93f80a0afdd tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1780057, 'name': ReconfigVM_Task, 'duration_secs': 0.135185} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1791.828351] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c93b67e-04ec-42bc-8892-c93f80a0afdd tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369441', 'volume_id': '9d889203-dc27-4007-a5c2-f62dd5709f2f', 'name': 'volume-9d889203-dc27-4007-a5c2-f62dd5709f2f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '19a41941-0679-4971-8a44-c95b13f5c294', 'attached_at': '', 'detached_at': '', 'volume_id': '9d889203-dc27-4007-a5c2-f62dd5709f2f', 'serial': '9d889203-dc27-4007-a5c2-f62dd5709f2f'} {{(pid=63379) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1791.830573] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780058, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.854794] env[63379]: DEBUG nova.objects.instance [None req-9955b926-379c-41b0-847a-52723bcc9049 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lazy-loading 'flavor' on Instance uuid 1d76a28f-822d-4b4f-be2f-2ad3371b3979 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1792.194887] env[63379]: DEBUG nova.network.neutron [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1792.238644] env[63379]: DEBUG oslo_concurrency.lockutils [None req-52687a0a-c68c-4546-9dc4-ed6c4a4ea79d tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "0f248290-a14c-4c76-98b3-4efa5bda5f05" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.744s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1792.245367] env[63379]: DEBUG nova.network.neutron [-] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1792.333414] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780058, 'name': CreateVM_Task, 'duration_secs': 0.353804} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.334196] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1792.335274] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1792.335460] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1792.335875] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1792.336155] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-198f71b2-a49c-48f4-8d96-835b5fe85134 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.342964] env[63379]: DEBUG oslo_vmware.api [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1792.342964] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5209f0bd-41d2-50e3-50bb-aabdd087dc5c" [ 1792.342964] env[63379]: _type = "Task" [ 1792.342964] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.353760] env[63379]: DEBUG oslo_vmware.api [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5209f0bd-41d2-50e3-50bb-aabdd087dc5c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.387198] env[63379]: DEBUG nova.objects.instance [None req-3c93b67e-04ec-42bc-8892-c93f80a0afdd tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Lazy-loading 'flavor' on Instance uuid 19a41941-0679-4971-8a44-c95b13f5c294 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1792.679753] env[63379]: DEBUG nova.network.neutron [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Updating instance_info_cache with network_info: [{"id": "c2313903-6e4e-42f8-be0f-3c00be1c0fec", "address": "fa:16:3e:bc:e7:16", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2313903-6e", "ovs_interfaceid": "c2313903-6e4e-42f8-be0f-3c00be1c0fec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1792.713281] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4e976b8f-52c6-44a9-95cb-405e93a8de01 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Acquiring lock "1d76a28f-822d-4b4f-be2f-2ad3371b3979" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1792.749066] env[63379]: INFO nova.compute.manager [-] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Took 1.32 seconds to deallocate network for instance. [ 1792.860092] env[63379]: DEBUG oslo_vmware.api [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5209f0bd-41d2-50e3-50bb-aabdd087dc5c, 'name': SearchDatastore_Task, 'duration_secs': 0.009365} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.860610] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1792.861328] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1792.861579] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1792.866879] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1792.866879] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1792.866879] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9955b926-379c-41b0-847a-52723bcc9049 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lock "1d76a28f-822d-4b4f-be2f-2ad3371b3979" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.385s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1792.866879] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d3c99281-e863-4884-a869-19dbd1e20763 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.869796] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4e976b8f-52c6-44a9-95cb-405e93a8de01 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lock "1d76a28f-822d-4b4f-be2f-2ad3371b3979" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.157s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1792.869829] env[63379]: DEBUG nova.compute.manager [None req-4e976b8f-52c6-44a9-95cb-405e93a8de01 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] 
Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1792.870624] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-351f29c3-ebc6-4df7-be9c-89c0d33c537d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.880649] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1792.880845] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1792.882926] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-067586cc-f3c3-4372-84c6-4720f54a7050 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.885407] env[63379]: DEBUG nova.compute.manager [None req-4e976b8f-52c6-44a9-95cb-405e93a8de01 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63379) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1792.885951] env[63379]: DEBUG nova.objects.instance [None req-4e976b8f-52c6-44a9-95cb-405e93a8de01 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lazy-loading 'flavor' on Instance uuid 1d76a28f-822d-4b4f-be2f-2ad3371b3979 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1792.893843] env[63379]: DEBUG oslo_vmware.api [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1792.893843] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b9d2dc-3e48-80e5-ea39-fa5edda0de17" [ 1792.893843] env[63379]: _type = "Task" [ 1792.893843] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.907462] env[63379]: DEBUG oslo_vmware.api [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b9d2dc-3e48-80e5-ea39-fa5edda0de17, 'name': SearchDatastore_Task, 'duration_secs': 0.008993} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.912023] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7dbb041f-8b9b-47c8-ae1f-314688481e09 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.919024] env[63379]: DEBUG oslo_vmware.api [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1792.919024] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c86b35-9584-8764-7164-51ca97dd22c1" [ 1792.919024] env[63379]: _type = "Task" [ 1792.919024] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.927909] env[63379]: DEBUG oslo_vmware.api [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c86b35-9584-8764-7164-51ca97dd22c1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.961688] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8cde083-27b1-4550-b2a0-d7cf9b7972f1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.969984] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18c729a9-865a-44a7-b915-4e2fef5c56ef {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.001547] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cee87c1e-a3c3-491d-85be-f3c7deeb35d6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.013415] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f83e1cd8-d942-4a26-86f7-751d75a8082f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.023918] env[63379]: DEBUG oslo_concurrency.lockutils [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "62494fa1-5990-490d-92ae-00607d7ebba1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1793.024519] env[63379]: DEBUG oslo_concurrency.lockutils [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "62494fa1-5990-490d-92ae-00607d7ebba1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1793.038283] env[63379]: DEBUG nova.compute.provider_tree [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 
tempest-ServerDiskConfigTestJSON-1340181381-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1793.076981] env[63379]: DEBUG nova.compute.manager [req-ea413a41-fbd4-4db9-9540-fbdf2ef751da req-6722e7a4-08f3-4a68-8435-6ccd47a2dc8a service nova] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Received event network-changed-c2313903-6e4e-42f8-be0f-3c00be1c0fec {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1793.077086] env[63379]: DEBUG nova.compute.manager [req-ea413a41-fbd4-4db9-9540-fbdf2ef751da req-6722e7a4-08f3-4a68-8435-6ccd47a2dc8a service nova] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Refreshing instance network info cache due to event network-changed-c2313903-6e4e-42f8-be0f-3c00be1c0fec. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1793.077232] env[63379]: DEBUG oslo_concurrency.lockutils [req-ea413a41-fbd4-4db9-9540-fbdf2ef751da req-6722e7a4-08f3-4a68-8435-6ccd47a2dc8a service nova] Acquiring lock "refresh_cache-8078bac6-146a-4e3a-a7a7-7093f617a330" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1793.181973] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Releasing lock "refresh_cache-8078bac6-146a-4e3a-a7a7-7093f617a330" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1793.182322] env[63379]: DEBUG nova.compute.manager [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Instance network_info: |[{"id": "c2313903-6e4e-42f8-be0f-3c00be1c0fec", "address": "fa:16:3e:bc:e7:16", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2313903-6e", "ovs_interfaceid": "c2313903-6e4e-42f8-be0f-3c00be1c0fec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1793.182631] env[63379]: DEBUG oslo_concurrency.lockutils [req-ea413a41-fbd4-4db9-9540-fbdf2ef751da req-6722e7a4-08f3-4a68-8435-6ccd47a2dc8a service nova] Acquired lock "refresh_cache-8078bac6-146a-4e3a-a7a7-7093f617a330" {{(pid=63379) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1793.182876] env[63379]: DEBUG nova.network.neutron [req-ea413a41-fbd4-4db9-9540-fbdf2ef751da req-6722e7a4-08f3-4a68-8435-6ccd47a2dc8a service nova] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Refreshing network info cache for port c2313903-6e4e-42f8-be0f-3c00be1c0fec {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1793.184202] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bc:e7:16', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c7d2575f-b92f-44ec-a863-634cb76631a2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c2313903-6e4e-42f8-be0f-3c00be1c0fec', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1793.192738] env[63379]: DEBUG oslo.service.loopingcall [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1793.193779] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1793.194031] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6a0d024b-d25e-43b9-bb85-f257d0b83857 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.234549] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1793.234549] env[63379]: value = "task-1780059" [ 1793.234549] env[63379]: _type = "Task" [ 1793.234549] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1793.243389] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780059, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.259775] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7dd0b351-e4c9-49f1-a41f-241bd39033bc tempest-DeleteServersAdminTestJSON-1737462030 tempest-DeleteServersAdminTestJSON-1737462030-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1793.391546] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e976b8f-52c6-44a9-95cb-405e93a8de01 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1793.391823] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a2242d7a-734b-4457-b42d-e65370979c7c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.399373] env[63379]: DEBUG oslo_vmware.api [None req-4e976b8f-52c6-44a9-95cb-405e93a8de01 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Waiting for the task: (returnval){ [ 1793.399373] env[63379]: value = "task-1780060" [ 1793.399373] env[63379]: _type = "Task" [ 1793.399373] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1793.402740] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3c93b67e-04ec-42bc-8892-c93f80a0afdd tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Lock "19a41941-0679-4971-8a44-c95b13f5c294" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.336s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1793.409367] env[63379]: DEBUG oslo_vmware.api [None req-4e976b8f-52c6-44a9-95cb-405e93a8de01 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1780060, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.431214] env[63379]: DEBUG oslo_vmware.api [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c86b35-9584-8764-7164-51ca97dd22c1, 'name': SearchDatastore_Task, 'duration_secs': 0.013111} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1793.431376] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1793.431731] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 9040201c-e1de-47d9-b9c2-b30c14e32749/9040201c-e1de-47d9-b9c2-b30c14e32749.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1793.432142] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-06175caa-2c68-45d5-a87d-c670a0f57465 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.440127] env[63379]: DEBUG oslo_vmware.api [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1793.440127] env[63379]: value = "task-1780061" [ 1793.440127] env[63379]: _type = "Task" [ 1793.440127] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1793.449132] env[63379]: DEBUG oslo_vmware.api [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780061, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.527146] env[63379]: DEBUG nova.compute.manager [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1793.541111] env[63379]: DEBUG nova.scheduler.client.report [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1793.748433] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780059, 'name': CreateVM_Task, 'duration_secs': 0.40094} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1793.748433] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1793.748433] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1793.748433] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1793.748433] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1793.748433] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c078489a-d61d-44b2-88ab-228a8e0fec0c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.753623] env[63379]: DEBUG oslo_vmware.api [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1793.753623] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a455fd-9b6b-0df4-25eb-f37ef6c252ae" [ 1793.753623] env[63379]: _type = "Task" [ 1793.753623] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1793.765877] env[63379]: DEBUG oslo_vmware.api [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a455fd-9b6b-0df4-25eb-f37ef6c252ae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.911180] env[63379]: DEBUG oslo_vmware.api [None req-4e976b8f-52c6-44a9-95cb-405e93a8de01 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1780060, 'name': PowerOffVM_Task, 'duration_secs': 0.210902} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1793.911180] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e976b8f-52c6-44a9-95cb-405e93a8de01 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1793.911362] env[63379]: DEBUG nova.compute.manager [None req-4e976b8f-52c6-44a9-95cb-405e93a8de01 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1793.912208] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-265a7547-dda4-4eb7-a303-9bad48ab8ef9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.949027] env[63379]: DEBUG nova.network.neutron [req-ea413a41-fbd4-4db9-9540-fbdf2ef751da req-6722e7a4-08f3-4a68-8435-6ccd47a2dc8a service nova] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Updated VIF entry in instance network info cache for port c2313903-6e4e-42f8-be0f-3c00be1c0fec. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1793.949533] env[63379]: DEBUG nova.network.neutron [req-ea413a41-fbd4-4db9-9540-fbdf2ef751da req-6722e7a4-08f3-4a68-8435-6ccd47a2dc8a service nova] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Updating instance_info_cache with network_info: [{"id": "c2313903-6e4e-42f8-be0f-3c00be1c0fec", "address": "fa:16:3e:bc:e7:16", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2313903-6e", "ovs_interfaceid": "c2313903-6e4e-42f8-be0f-3c00be1c0fec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1793.954078] env[63379]: DEBUG oslo_vmware.api [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780061, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.047255] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.351s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1794.047790] env[63379]: DEBUG nova.compute.manager [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1794.051160] env[63379]: DEBUG oslo_concurrency.lockutils [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1794.051353] env[63379]: DEBUG oslo_concurrency.lockutils [None req-21a8507d-7301-4724-be89-15e19551cf39 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.117s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1794.051551] env[63379]: DEBUG oslo_concurrency.lockutils [None req-21a8507d-7301-4724-be89-15e19551cf39 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1794.053410] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 5.997s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1794.073857] env[63379]: INFO nova.scheduler.client.report [None req-21a8507d-7301-4724-be89-15e19551cf39 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Deleted allocations for instance eda684fa-1595-4985-beb7-c298049411bf [ 1794.266060] env[63379]: DEBUG oslo_vmware.api [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a455fd-9b6b-0df4-25eb-f37ef6c252ae, 'name': SearchDatastore_Task, 'duration_secs': 0.064682} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.266385] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1794.266633] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1794.266896] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1794.267085] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1794.267278] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1794.267547] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ae8dbca6-40aa-467f-a0c6-ecaf75651115 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.280623] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1794.281290] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1794.282050] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bb04aebe-f78a-462f-9e0a-e569e9ca43cf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.287734] env[63379]: DEBUG oslo_vmware.api [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1794.287734] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52903679-ec4e-7f1d-7d45-4038f2190b37" [ 1794.287734] env[63379]: _type = "Task" [ 1794.287734] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.295490] env[63379]: DEBUG oslo_vmware.api [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52903679-ec4e-7f1d-7d45-4038f2190b37, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.427415] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4e976b8f-52c6-44a9-95cb-405e93a8de01 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lock "1d76a28f-822d-4b4f-be2f-2ad3371b3979" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.558s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1794.429707] env[63379]: DEBUG oslo_concurrency.lockutils [None req-67a48870-7e35-4c65-a7be-8e4f17190e64 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquiring lock "19a41941-0679-4971-8a44-c95b13f5c294" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1794.429976] env[63379]: DEBUG oslo_concurrency.lockutils [None req-67a48870-7e35-4c65-a7be-8e4f17190e64 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Lock "19a41941-0679-4971-8a44-c95b13f5c294" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1794.430241] env[63379]: DEBUG oslo_concurrency.lockutils [None req-67a48870-7e35-4c65-a7be-8e4f17190e64 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquiring lock "19a41941-0679-4971-8a44-c95b13f5c294-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1794.430453] env[63379]: DEBUG oslo_concurrency.lockutils [None req-67a48870-7e35-4c65-a7be-8e4f17190e64 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Lock "19a41941-0679-4971-8a44-c95b13f5c294-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1794.430628] env[63379]: DEBUG oslo_concurrency.lockutils [None req-67a48870-7e35-4c65-a7be-8e4f17190e64 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Lock "19a41941-0679-4971-8a44-c95b13f5c294-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1794.432462] env[63379]: INFO nova.compute.manager [None req-67a48870-7e35-4c65-a7be-8e4f17190e64 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Terminating instance [ 1794.434101] env[63379]: DEBUG nova.compute.manager [None req-67a48870-7e35-4c65-a7be-8e4f17190e64 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1794.434328] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-67a48870-7e35-4c65-a7be-8e4f17190e64 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1794.435204] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ebbc00c-2683-4b45-838c-9dfca226dc91 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.448099] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-67a48870-7e35-4c65-a7be-8e4f17190e64 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1794.448738] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cb13fb98-fa67-433c-bc45-a4691d522b8b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.454641] env[63379]: DEBUG oslo_vmware.api [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780061, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.529566} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.454641] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 9040201c-e1de-47d9-b9c2-b30c14e32749/9040201c-e1de-47d9-b9c2-b30c14e32749.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1794.454641] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1794.455484] env[63379]: DEBUG oslo_concurrency.lockutils [req-ea413a41-fbd4-4db9-9540-fbdf2ef751da req-6722e7a4-08f3-4a68-8435-6ccd47a2dc8a service nova] Releasing lock "refresh_cache-8078bac6-146a-4e3a-a7a7-7093f617a330" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1794.455484] env[63379]: DEBUG nova.compute.manager [req-ea413a41-fbd4-4db9-9540-fbdf2ef751da req-6722e7a4-08f3-4a68-8435-6ccd47a2dc8a service nova] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Received event network-vif-deleted-844ccd54-2ca6-4d56-a26f-eea2683c760c {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1794.456266] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b8970239-6478-43ca-a93b-24383eb63367 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.458605] env[63379]: DEBUG oslo_vmware.api [None req-67a48870-7e35-4c65-a7be-8e4f17190e64 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1794.458605] env[63379]: value = "task-1780062" [ 1794.458605] env[63379]: _type = "Task" [ 1794.458605] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.464362] env[63379]: DEBUG oslo_vmware.api [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1794.464362] env[63379]: value = "task-1780063" [ 1794.464362] env[63379]: _type = "Task" [ 1794.464362] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.467533] env[63379]: DEBUG oslo_vmware.api [None req-67a48870-7e35-4c65-a7be-8e4f17190e64 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1780062, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.475509] env[63379]: DEBUG oslo_vmware.api [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780063, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.558536] env[63379]: DEBUG nova.compute.utils [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1794.566992] env[63379]: DEBUG nova.compute.manager [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1794.567192] env[63379]: DEBUG nova.network.neutron [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1794.584786] env[63379]: DEBUG oslo_concurrency.lockutils [None req-21a8507d-7301-4724-be89-15e19551cf39 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "eda684fa-1595-4985-beb7-c298049411bf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.751s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1794.649536] env[63379]: DEBUG nova.policy [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1fd786092d394d1a9b444051664ac7ae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0f28f4532d464e6eb90ab75799990c85', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1794.800508] env[63379]: DEBUG oslo_vmware.api [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52903679-ec4e-7f1d-7d45-4038f2190b37, 'name': SearchDatastore_Task, 'duration_secs': 0.009401} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.801411] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c93fc10-517a-4519-94d8-dddff062a099 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.808395] env[63379]: DEBUG oslo_vmware.api [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1794.808395] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52cdc8e2-48a1-699e-614c-f401e2041349" [ 1794.808395] env[63379]: _type = "Task" [ 1794.808395] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.821247] env[63379]: DEBUG oslo_vmware.api [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52cdc8e2-48a1-699e-614c-f401e2041349, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.970024] env[63379]: DEBUG oslo_vmware.api [None req-67a48870-7e35-4c65-a7be-8e4f17190e64 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1780062, 'name': PowerOffVM_Task, 'duration_secs': 0.229347} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.973563] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-67a48870-7e35-4c65-a7be-8e4f17190e64 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1794.973804] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-67a48870-7e35-4c65-a7be-8e4f17190e64 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1794.974130] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-226d960f-55ae-43d1-baff-ff1bd09fce7d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.980450] env[63379]: DEBUG oslo_vmware.api [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780063, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067036} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.980699] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1794.981569] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45e17497-e5d4-4474-821c-a0db7befc09a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.005479] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] 9040201c-e1de-47d9-b9c2-b30c14e32749/9040201c-e1de-47d9-b9c2-b30c14e32749.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1795.006563] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ea56d786-b7e2-4685-b83a-a882db0f0693 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.027321] env[63379]: DEBUG oslo_vmware.api [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1795.027321] env[63379]: value = "task-1780065" [ 1795.027321] env[63379]: _type = "Task" [ 1795.027321] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.036162] env[63379]: DEBUG oslo_vmware.api [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780065, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.068376] env[63379]: DEBUG nova.compute.manager [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1795.094311] env[63379]: DEBUG nova.network.neutron [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Successfully created port: 414f9be4-c922-4750-817c-32bc1d4ac6c4 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1795.098835] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 90f0c97d-695b-4975-8ab9-4e77a9175da1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1795.098978] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 1d2de9da-9dfe-42d2-b206-bb5139b1970b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1795.099119] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1795.099239] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 1d76a28f-822d-4b4f-be2f-2ad3371b3979 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1795.099354] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 19a41941-0679-4971-8a44-c95b13f5c294 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1795.099466] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 510db409-0b4c-494a-8084-39ef3cd6c918 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1795.099577] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance a7cce485-7476-4ea1-b127-68d879e164cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1795.099687] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 19941838-d6b0-4fb8-9d06-f4a1b80ba428 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1795.099824] env[63379]: WARNING nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance c900bb90-b4a8-40a2-9436-5a0ced1dd919 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 1795.099943] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 4b419aa8-d4da-45fd-a6da-6f05ee851f2f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1795.100066] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 9040201c-e1de-47d9-b9c2-b30c14e32749 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1795.100195] env[63379]: WARNING nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance cb62192b-63db-40d0-97bb-1df171ade64b is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1795.100361] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 8078bac6-146a-4e3a-a7a7-7093f617a330 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1795.100414] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 9faef8ba-2263-4af8-ba5b-13a17b4275b6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1795.141738] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-67a48870-7e35-4c65-a7be-8e4f17190e64 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1795.142730] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-67a48870-7e35-4c65-a7be-8e4f17190e64 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1795.142730] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-67a48870-7e35-4c65-a7be-8e4f17190e64 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Deleting the datastore file [datastore1] 19a41941-0679-4971-8a44-c95b13f5c294 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1795.142730] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-47c391f6-91c4-4cec-99cb-96b9466ff8cc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.153018] env[63379]: DEBUG oslo_vmware.api [None req-67a48870-7e35-4c65-a7be-8e4f17190e64 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1795.153018] env[63379]: value = "task-1780066" [ 1795.153018] env[63379]: _type = "Task" [ 1795.153018] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.159732] env[63379]: DEBUG oslo_vmware.api [None req-67a48870-7e35-4c65-a7be-8e4f17190e64 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1780066, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.272793] env[63379]: DEBUG nova.objects.instance [None req-8661f690-733f-49e0-9db4-71ab3c2f5699 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lazy-loading 'flavor' on Instance uuid 1d76a28f-822d-4b4f-be2f-2ad3371b3979 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1795.320273] env[63379]: DEBUG oslo_vmware.api [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52cdc8e2-48a1-699e-614c-f401e2041349, 'name': SearchDatastore_Task, 'duration_secs': 0.010851} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.321050] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1795.321440] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 8078bac6-146a-4e3a-a7a7-7093f617a330/8078bac6-146a-4e3a-a7a7-7093f617a330.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1795.321896] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-065eeafc-fd16-4299-8267-6cdd472ae723 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.329882] env[63379]: DEBUG oslo_vmware.api [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1795.329882] env[63379]: value = "task-1780067" [ 1795.329882] env[63379]: _type = "Task" [ 1795.329882] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.338566] env[63379]: DEBUG oslo_vmware.api [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780067, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.538623] env[63379]: DEBUG oslo_vmware.api [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780065, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.603250] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 62494fa1-5990-490d-92ae-00607d7ebba1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1795.603520] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Total usable vcpus: 48, total allocated vcpus: 12 {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1795.603760] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2816MB phys_disk=200GB used_disk=12GB total_vcpus=48 used_vcpus=12 pci_stats=[] {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1795.665242] env[63379]: DEBUG oslo_vmware.api [None req-67a48870-7e35-4c65-a7be-8e4f17190e64 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1780066, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146085} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.669052] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-67a48870-7e35-4c65-a7be-8e4f17190e64 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1795.669052] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-67a48870-7e35-4c65-a7be-8e4f17190e64 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1795.669052] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-67a48870-7e35-4c65-a7be-8e4f17190e64 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1795.669052] env[63379]: INFO nova.compute.manager [None req-67a48870-7e35-4c65-a7be-8e4f17190e64 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Took 1.23 seconds to destroy the instance on the hypervisor. [ 1795.669240] env[63379]: DEBUG oslo.service.loopingcall [None req-67a48870-7e35-4c65-a7be-8e4f17190e64 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1795.669657] env[63379]: DEBUG nova.compute.manager [-] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1795.669776] env[63379]: DEBUG nova.network.neutron [-] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1795.776683] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8661f690-733f-49e0-9db4-71ab3c2f5699 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Acquiring lock "refresh_cache-1d76a28f-822d-4b4f-be2f-2ad3371b3979" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1795.778103] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8661f690-733f-49e0-9db4-71ab3c2f5699 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Acquired lock "refresh_cache-1d76a28f-822d-4b4f-be2f-2ad3371b3979" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1795.778103] env[63379]: DEBUG nova.network.neutron [None req-8661f690-733f-49e0-9db4-71ab3c2f5699 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1795.778103] env[63379]: DEBUG nova.objects.instance [None req-8661f690-733f-49e0-9db4-71ab3c2f5699 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lazy-loading 'info_cache' on Instance uuid 1d76a28f-822d-4b4f-be2f-2ad3371b3979 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1795.841164] env[63379]: DEBUG oslo_vmware.api [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780067, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.843399] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebcabc8c-b487-43c8-8de5-b1d85300eecc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.851203] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2047a96e-4ea2-4bb6-9cca-929d2d4ffb2b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.881234] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0da18abd-7c6b-471e-9307-724601126edc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.888966] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cceeeb8e-63ba-4303-868d-6daa76f07261 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.902480] env[63379]: DEBUG nova.compute.provider_tree [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1796.038193] env[63379]: DEBUG oslo_vmware.api [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780065, 'name': ReconfigVM_Task, 'duration_secs': 0.842991} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.038831] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Reconfigured VM instance instance-0000005b to attach disk [datastore1] 9040201c-e1de-47d9-b9c2-b30c14e32749/9040201c-e1de-47d9-b9c2-b30c14e32749.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1796.039239] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-acd341e8-ebb5-49c8-ba48-3dc59fa7bea9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.046276] env[63379]: DEBUG oslo_vmware.api [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1796.046276] env[63379]: value = "task-1780068" [ 1796.046276] env[63379]: _type = "Task" [ 1796.046276] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1796.054511] env[63379]: DEBUG oslo_vmware.api [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780068, 'name': Rename_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.082412] env[63379]: DEBUG nova.compute.manager [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1796.110759] env[63379]: DEBUG nova.virt.hardware [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1796.111016] env[63379]: DEBUG nova.virt.hardware [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1796.111182] env[63379]: DEBUG nova.virt.hardware [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1796.111373] env[63379]: DEBUG nova.virt.hardware [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1796.111523] env[63379]: DEBUG nova.virt.hardware [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1796.111676] env[63379]: DEBUG nova.virt.hardware [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1796.111889] env[63379]: DEBUG nova.virt.hardware [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1796.112063] env[63379]: DEBUG nova.virt.hardware [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1796.112239] env[63379]: DEBUG nova.virt.hardware [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1796.112406] env[63379]: DEBUG nova.virt.hardware [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1796.112591] env[63379]: DEBUG nova.virt.hardware [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1796.114027] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0016eb8-153f-4fce-8482-ac40288ff7b5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.122342] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b85ea374-2ad9-451e-a70b-8ea5fbc946fe {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.238000] env[63379]: DEBUG nova.compute.manager [req-5f1af28a-ead6-41d1-91fe-92f38fe6801e req-a5daf22a-99fe-400d-a405-bf9bbe2a9f58 service nova] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Received event network-vif-deleted-2d279162-72d1-4378-b83d-c80b2815f680 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1796.238267] env[63379]: INFO nova.compute.manager [req-5f1af28a-ead6-41d1-91fe-92f38fe6801e req-a5daf22a-99fe-400d-a405-bf9bbe2a9f58 service nova] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Neutron deleted interface 2d279162-72d1-4378-b83d-c80b2815f680; detaching it from the instance and deleting it from the info cache [ 1796.238427] env[63379]: DEBUG nova.network.neutron [req-5f1af28a-ead6-41d1-91fe-92f38fe6801e req-a5daf22a-99fe-400d-a405-bf9bbe2a9f58 service nova] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1796.284059] env[63379]: DEBUG nova.objects.base [None req-8661f690-733f-49e0-9db4-71ab3c2f5699 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Object Instance<1d76a28f-822d-4b4f-be2f-2ad3371b3979> lazy-loaded attributes: flavor,info_cache {{(pid=63379) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1796.341554] env[63379]: DEBUG oslo_vmware.api [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 
tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780067, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.565042} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.341877] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 8078bac6-146a-4e3a-a7a7-7093f617a330/8078bac6-146a-4e3a-a7a7-7093f617a330.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1796.342110] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1796.342362] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-371e4fc2-2d9b-4456-a0b8-9e22bbe651b0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.349251] env[63379]: DEBUG oslo_vmware.api [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1796.349251] env[63379]: value = "task-1780069" [ 1796.349251] env[63379]: _type = "Task" [ 1796.349251] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1796.356406] env[63379]: DEBUG oslo_vmware.api [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780069, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.405481] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1796.556283] env[63379]: DEBUG oslo_vmware.api [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780068, 'name': Rename_Task, 'duration_secs': 0.138888} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.556567] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1796.556806] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-334264eb-3044-4023-8ac7-301d67530803 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.564137] env[63379]: DEBUG oslo_vmware.api [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1796.564137] env[63379]: value = "task-1780070" [ 1796.564137] env[63379]: _type = "Task" [ 1796.564137] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1796.573150] env[63379]: DEBUG oslo_vmware.api [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780070, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.623157] env[63379]: DEBUG nova.network.neutron [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Successfully updated port: 414f9be4-c922-4750-817c-32bc1d4ac6c4 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1796.711829] env[63379]: DEBUG nova.network.neutron [-] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1796.741720] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f30f20f2-283b-4730-afff-5dcaa3567df7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.751298] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82187b05-f932-43d2-a48c-f619d2bb566c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.784536] env[63379]: DEBUG nova.compute.manager [req-5f1af28a-ead6-41d1-91fe-92f38fe6801e req-a5daf22a-99fe-400d-a405-bf9bbe2a9f58 service nova] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Detach interface failed, port_id=2d279162-72d1-4378-b83d-c80b2815f680, reason: Instance 19a41941-0679-4971-8a44-c95b13f5c294 could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 1796.859758] env[63379]: DEBUG oslo_vmware.api [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780069, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061528} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.860392] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1796.862753] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9fef3b9-0d38-4663-b02b-34b270bfc74e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.882797] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] 8078bac6-146a-4e3a-a7a7-7093f617a330/8078bac6-146a-4e3a-a7a7-7093f617a330.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1796.883092] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-256d9adc-7cb1-449b-bd3e-9f2b44f33d12 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.904321] env[63379]: DEBUG oslo_vmware.api [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1796.904321] env[63379]: value = "task-1780071" [ 1796.904321] env[63379]: _type = "Task" [ 1796.904321] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1796.913254] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63379) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1796.913462] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.860s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1796.913711] env[63379]: DEBUG oslo_vmware.api [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780071, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.913962] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ae55e565-bd8a-4779-b315-47764b50cf13 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.276s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1796.914169] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ae55e565-bd8a-4779-b315-47764b50cf13 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1796.916031] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7dd0b351-e4c9-49f1-a41f-241bd39033bc tempest-DeleteServersAdminTestJSON-1737462030 tempest-DeleteServersAdminTestJSON-1737462030-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.656s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1796.916235] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7dd0b351-e4c9-49f1-a41f-241bd39033bc tempest-DeleteServersAdminTestJSON-1737462030 tempest-DeleteServersAdminTestJSON-1737462030-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1796.917862] env[63379]: DEBUG oslo_concurrency.lockutils [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.867s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1796.919352] env[63379]: INFO nova.compute.claims [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1796.923474] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1796.923474] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Cleaning up deleted instances {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11274}} [ 1796.950140] env[63379]: INFO nova.scheduler.client.report [None req-ae55e565-bd8a-4779-b315-47764b50cf13 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Deleted allocations for instance c900bb90-b4a8-40a2-9436-5a0ced1dd919 [ 1796.951987] env[63379]: INFO nova.scheduler.client.report [None req-7dd0b351-e4c9-49f1-a41f-241bd39033bc tempest-DeleteServersAdminTestJSON-1737462030 
tempest-DeleteServersAdminTestJSON-1737462030-project-admin] Deleted allocations for instance cb62192b-63db-40d0-97bb-1df171ade64b [ 1797.046636] env[63379]: DEBUG nova.network.neutron [None req-8661f690-733f-49e0-9db4-71ab3c2f5699 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Updating instance_info_cache with network_info: [{"id": "2ac41cb5-759a-42a6-a664-26ad0cc81d81", "address": "fa:16:3e:00:25:b1", "network": {"id": "a7b09ae6-790d-492f-a067-68a9ea22533a", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-776111847-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fceda42cf54845eab8068573e0f8eb26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ac41cb5-75", "ovs_interfaceid": "2ac41cb5-759a-42a6-a664-26ad0cc81d81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1797.075289] env[63379]: DEBUG oslo_vmware.api [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780070, 'name': PowerOnVM_Task, 'duration_secs': 0.469643} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1797.075599] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1797.075840] env[63379]: DEBUG nova.compute.manager [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1797.076715] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad7b6610-4b7c-45d3-9c69-4274ddb3f57c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.125286] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "refresh_cache-9faef8ba-2263-4af8-ba5b-13a17b4275b6" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1797.125444] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquired lock "refresh_cache-9faef8ba-2263-4af8-ba5b-13a17b4275b6" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1797.125596] env[63379]: DEBUG nova.network.neutron [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1797.215020] env[63379]: INFO nova.compute.manager [-] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Took 1.55 seconds to deallocate network for instance. [ 1797.414788] env[63379]: DEBUG oslo_vmware.api [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780071, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.437783] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] There are 57 instances to clean {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11283}} [ 1797.438033] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 2254844f-b1f9-435e-ac8a-f114f05331e1] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1797.462928] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7dd0b351-e4c9-49f1-a41f-241bd39033bc tempest-DeleteServersAdminTestJSON-1737462030 tempest-DeleteServersAdminTestJSON-1737462030-project-admin] Lock "cb62192b-63db-40d0-97bb-1df171ade64b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.266s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1797.464288] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ae55e565-bd8a-4779-b315-47764b50cf13 tempest-ListServerFiltersTestJSON-1740909441 tempest-ListServerFiltersTestJSON-1740909441-project-member] Lock "c900bb90-b4a8-40a2-9436-5a0ced1dd919" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.707s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1797.549482] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8661f690-733f-49e0-9db4-71ab3c2f5699 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Releasing lock "refresh_cache-1d76a28f-822d-4b4f-be2f-2ad3371b3979" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1797.588556] env[63379]: INFO nova.compute.manager [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] bringing vm to original state: 'stopped' [ 1797.674207] env[63379]: DEBUG nova.network.neutron [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Instance cache missing network info. 
{{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1797.721384] env[63379]: DEBUG oslo_concurrency.lockutils [None req-67a48870-7e35-4c65-a7be-8e4f17190e64 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1797.867545] env[63379]: DEBUG nova.network.neutron [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Updating instance_info_cache with network_info: [{"id": "414f9be4-c922-4750-817c-32bc1d4ac6c4", "address": "fa:16:3e:a5:a9:b3", "network": {"id": "a2c9b802-041e-4679-bfb1-118fd9cd10f3", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-986609966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f28f4532d464e6eb90ab75799990c85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap414f9be4-c9", "ovs_interfaceid": "414f9be4-c922-4750-817c-32bc1d4ac6c4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1797.915579] env[63379]: DEBUG oslo_vmware.api [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780071, 'name': ReconfigVM_Task, 'duration_secs': 0.749998} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1797.915886] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Reconfigured VM instance instance-0000005d to attach disk [datastore1] 8078bac6-146a-4e3a-a7a7-7093f617a330/8078bac6-146a-4e3a-a7a7-7093f617a330.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1797.916554] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dc0ae877-6992-49db-9def-941ce452f891 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.923802] env[63379]: DEBUG oslo_vmware.api [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1797.923802] env[63379]: value = "task-1780072" [ 1797.923802] env[63379]: _type = "Task" [ 1797.923802] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1797.931796] env[63379]: DEBUG oslo_vmware.api [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780072, 'name': Rename_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.945453] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 0f248290-a14c-4c76-98b3-4efa5bda5f05] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1798.053316] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-8661f690-733f-49e0-9db4-71ab3c2f5699 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1798.053661] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-36cf3427-9ae2-488a-a93d-5f24d2308101 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.061461] env[63379]: DEBUG oslo_vmware.api [None req-8661f690-733f-49e0-9db4-71ab3c2f5699 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Waiting for the task: (returnval){ [ 1798.061461] env[63379]: value = "task-1780073" [ 1798.061461] env[63379]: _type = "Task" [ 1798.061461] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1798.073674] env[63379]: DEBUG oslo_vmware.api [None req-8661f690-733f-49e0-9db4-71ab3c2f5699 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1780073, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.155835] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e96556fa-0250-4098-9874-13feb0a08389 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.162571] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f79cab91-9dd0-4883-9282-c01ed5574a73 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.193956] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbb495ab-e2c3-4707-b833-f96129e4ef8c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.201943] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75662738-f5bb-4064-a294-88cba993bc5b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.216576] env[63379]: DEBUG nova.compute.provider_tree [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1798.267859] env[63379]: DEBUG nova.compute.manager [req-112aab9a-6b3d-4ba2-8050-ef88c672be24 req-02c01642-f02a-4f86-a06a-6d8122fe61a3 service nova] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Received event network-vif-plugged-414f9be4-c922-4750-817c-32bc1d4ac6c4 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1798.268017] env[63379]: DEBUG oslo_concurrency.lockutils [req-112aab9a-6b3d-4ba2-8050-ef88c672be24 req-02c01642-f02a-4f86-a06a-6d8122fe61a3 service nova] Acquiring lock "9faef8ba-2263-4af8-ba5b-13a17b4275b6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1798.268259] env[63379]: DEBUG oslo_concurrency.lockutils [req-112aab9a-6b3d-4ba2-8050-ef88c672be24 req-02c01642-f02a-4f86-a06a-6d8122fe61a3 service nova] Lock "9faef8ba-2263-4af8-ba5b-13a17b4275b6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1798.268473] env[63379]: DEBUG oslo_concurrency.lockutils [req-112aab9a-6b3d-4ba2-8050-ef88c672be24 req-02c01642-f02a-4f86-a06a-6d8122fe61a3 service nova] Lock "9faef8ba-2263-4af8-ba5b-13a17b4275b6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1798.268592] env[63379]: DEBUG nova.compute.manager [req-112aab9a-6b3d-4ba2-8050-ef88c672be24 req-02c01642-f02a-4f86-a06a-6d8122fe61a3 service nova] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] No waiting events found dispatching network-vif-plugged-414f9be4-c922-4750-817c-32bc1d4ac6c4 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1798.268763] env[63379]: 
WARNING nova.compute.manager [req-112aab9a-6b3d-4ba2-8050-ef88c672be24 req-02c01642-f02a-4f86-a06a-6d8122fe61a3 service nova] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Received unexpected event network-vif-plugged-414f9be4-c922-4750-817c-32bc1d4ac6c4 for instance with vm_state building and task_state spawning. [ 1798.268964] env[63379]: DEBUG nova.compute.manager [req-112aab9a-6b3d-4ba2-8050-ef88c672be24 req-02c01642-f02a-4f86-a06a-6d8122fe61a3 service nova] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Received event network-changed-414f9be4-c922-4750-817c-32bc1d4ac6c4 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1798.269255] env[63379]: DEBUG nova.compute.manager [req-112aab9a-6b3d-4ba2-8050-ef88c672be24 req-02c01642-f02a-4f86-a06a-6d8122fe61a3 service nova] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Refreshing instance network info cache due to event network-changed-414f9be4-c922-4750-817c-32bc1d4ac6c4. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1798.269546] env[63379]: DEBUG oslo_concurrency.lockutils [req-112aab9a-6b3d-4ba2-8050-ef88c672be24 req-02c01642-f02a-4f86-a06a-6d8122fe61a3 service nova] Acquiring lock "refresh_cache-9faef8ba-2263-4af8-ba5b-13a17b4275b6" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1798.370648] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Releasing lock "refresh_cache-9faef8ba-2263-4af8-ba5b-13a17b4275b6" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1798.370765] env[63379]: DEBUG nova.compute.manager [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Instance network_info: |[{"id": "414f9be4-c922-4750-817c-32bc1d4ac6c4", "address": "fa:16:3e:a5:a9:b3", "network": {"id": "a2c9b802-041e-4679-bfb1-118fd9cd10f3", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-986609966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f28f4532d464e6eb90ab75799990c85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap414f9be4-c9", "ovs_interfaceid": "414f9be4-c922-4750-817c-32bc1d4ac6c4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1798.371085] env[63379]: DEBUG oslo_concurrency.lockutils [req-112aab9a-6b3d-4ba2-8050-ef88c672be24 req-02c01642-f02a-4f86-a06a-6d8122fe61a3 service nova] Acquired lock "refresh_cache-9faef8ba-2263-4af8-ba5b-13a17b4275b6" 
{{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1798.371277] env[63379]: DEBUG nova.network.neutron [req-112aab9a-6b3d-4ba2-8050-ef88c672be24 req-02c01642-f02a-4f86-a06a-6d8122fe61a3 service nova] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Refreshing network info cache for port 414f9be4-c922-4750-817c-32bc1d4ac6c4 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1798.372660] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a5:a9:b3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8f441782-e89c-4815-b53e-af83c5d27902', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '414f9be4-c922-4750-817c-32bc1d4ac6c4', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1798.380206] env[63379]: DEBUG oslo.service.loopingcall [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1798.382925] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1798.383843] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d0c015f7-afdf-4e3e-afc1-188ea8eaae36 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.403562] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1798.403562] env[63379]: value = "task-1780074" [ 1798.403562] env[63379]: _type = "Task" [ 1798.403562] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1798.413469] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780074, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.436167] env[63379]: DEBUG oslo_vmware.api [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780072, 'name': Rename_Task, 'duration_secs': 0.165466} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1798.436586] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1798.436806] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a53e6970-e2fd-4796-8708-d245420bd1a6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.443117] env[63379]: DEBUG oslo_vmware.api [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1798.443117] env[63379]: value = "task-1780075" [ 1798.443117] env[63379]: _type = "Task" [ 1798.443117] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1798.451482] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: ebfe6204-c7d5-4e0c-bb63-74d5755552f6] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1798.453309] env[63379]: DEBUG oslo_vmware.api [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780075, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.573357] env[63379]: DEBUG oslo_vmware.api [None req-8661f690-733f-49e0-9db4-71ab3c2f5699 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1780073, 'name': PowerOnVM_Task, 'duration_secs': 0.409854} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1798.573733] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-8661f690-733f-49e0-9db4-71ab3c2f5699 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1798.573850] env[63379]: DEBUG nova.compute.manager [None req-8661f690-733f-49e0-9db4-71ab3c2f5699 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1798.574658] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1152d186-e035-4489-b19a-9772adfea4ac {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.600063] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "9040201c-e1de-47d9-b9c2-b30c14e32749" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1798.600407] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "9040201c-e1de-47d9-b9c2-b30c14e32749" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1798.600678] env[63379]: DEBUG nova.compute.manager [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1798.601975] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e147490-25ca-4c94-9899-c4d8dd95baa7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.607605] env[63379]: DEBUG nova.network.neutron [req-112aab9a-6b3d-4ba2-8050-ef88c672be24 req-02c01642-f02a-4f86-a06a-6d8122fe61a3 service nova] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Updated VIF entry in instance network info cache for port 414f9be4-c922-4750-817c-32bc1d4ac6c4. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1798.608127] env[63379]: DEBUG nova.network.neutron [req-112aab9a-6b3d-4ba2-8050-ef88c672be24 req-02c01642-f02a-4f86-a06a-6d8122fe61a3 service nova] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Updating instance_info_cache with network_info: [{"id": "414f9be4-c922-4750-817c-32bc1d4ac6c4", "address": "fa:16:3e:a5:a9:b3", "network": {"id": "a2c9b802-041e-4679-bfb1-118fd9cd10f3", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-986609966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f28f4532d464e6eb90ab75799990c85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap414f9be4-c9", "ovs_interfaceid": "414f9be4-c922-4750-817c-32bc1d4ac6c4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1798.614710] env[63379]: DEBUG nova.compute.manager [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63379) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1798.617529] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1798.617863] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3e6dcd61-ac8e-4041-9b3c-b91a9780188d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.626563] env[63379]: DEBUG oslo_vmware.api [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1798.626563] env[63379]: value = "task-1780076" [ 1798.626563] env[63379]: _type = "Task" [ 1798.626563] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1798.636160] env[63379]: DEBUG oslo_vmware.api [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780076, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.714021] env[63379]: DEBUG oslo_concurrency.lockutils [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Acquiring lock "f99bad46-931d-497a-8586-b140309b0b45" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1798.714302] env[63379]: DEBUG oslo_concurrency.lockutils [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Lock "f99bad46-931d-497a-8586-b140309b0b45" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1798.720162] env[63379]: DEBUG nova.scheduler.client.report [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1798.916135] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780074, 'name': CreateVM_Task, 'duration_secs': 0.363477} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1798.916413] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1798.917089] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1798.917269] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1798.917593] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1798.917878] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1e33562-843a-415e-a153-27e77f6d4062 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.922676] env[63379]: DEBUG oslo_vmware.api [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1798.922676] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e4c461-29af-6b10-811a-f58a6da42d51" [ 1798.922676] env[63379]: _type = "Task" [ 1798.922676] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1798.930409] env[63379]: DEBUG oslo_vmware.api [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e4c461-29af-6b10-811a-f58a6da42d51, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.952425] env[63379]: DEBUG oslo_vmware.api [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780075, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.955040] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 3e875e92-673c-4cfa-86ce-fc270ae03e94] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1799.111775] env[63379]: DEBUG oslo_concurrency.lockutils [req-112aab9a-6b3d-4ba2-8050-ef88c672be24 req-02c01642-f02a-4f86-a06a-6d8122fe61a3 service nova] Releasing lock "refresh_cache-9faef8ba-2263-4af8-ba5b-13a17b4275b6" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1799.137056] env[63379]: DEBUG oslo_vmware.api [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780076, 'name': PowerOffVM_Task, 'duration_secs': 0.165526} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1799.137056] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1799.137056] env[63379]: DEBUG nova.compute.manager [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1799.137757] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dda7749-8224-40aa-a809-01b76a8cc6bb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.217102] env[63379]: DEBUG nova.compute.manager [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: f99bad46-931d-497a-8586-b140309b0b45] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1799.224719] env[63379]: DEBUG oslo_concurrency.lockutils [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.307s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1799.225386] env[63379]: DEBUG nova.compute.manager [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1799.227915] env[63379]: DEBUG oslo_concurrency.lockutils [None req-67a48870-7e35-4c65-a7be-8e4f17190e64 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.507s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1799.228177] env[63379]: DEBUG nova.objects.instance [None req-67a48870-7e35-4c65-a7be-8e4f17190e64 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Lazy-loading 'resources' on Instance uuid 19a41941-0679-4971-8a44-c95b13f5c294 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1799.434325] env[63379]: DEBUG oslo_vmware.api [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e4c461-29af-6b10-811a-f58a6da42d51, 'name': SearchDatastore_Task, 'duration_secs': 0.011059} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1799.434710] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1799.435141] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1799.435558] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1799.435862] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1799.436221] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1799.436675] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-1845ecd1-dca0-453f-8710-8cca148108bf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.453312] env[63379]: DEBUG oslo_vmware.api [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780075, 'name': PowerOnVM_Task, 'duration_secs': 0.684414} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1799.453638] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1799.453862] env[63379]: INFO nova.compute.manager [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Took 9.43 seconds to spawn the instance on the hypervisor. [ 1799.454081] env[63379]: DEBUG nova.compute.manager [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1799.454951] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3260724f-9e55-4905-85e4-ca01e215b33f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.458268] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: eda684fa-1595-4985-beb7-c298049411bf] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1799.475374] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1799.475617] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1799.476478] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bb447aa9-5359-4822-a5b5-c0b0de5a7635 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.482874] env[63379]: DEBUG oslo_vmware.api [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1799.482874] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]521e03bd-6bc7-024c-50bc-9b20a376c809" [ 1799.482874] env[63379]: _type = "Task" [ 1799.482874] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1799.491910] env[63379]: DEBUG oslo_vmware.api [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]521e03bd-6bc7-024c-50bc-9b20a376c809, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.649695] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "9040201c-e1de-47d9-b9c2-b30c14e32749" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.049s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1799.731064] env[63379]: DEBUG nova.compute.utils [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1799.735048] env[63379]: DEBUG nova.compute.manager [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1799.735536] env[63379]: DEBUG nova.network.neutron [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1799.741138] env[63379]: DEBUG oslo_concurrency.lockutils [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1799.775522] env[63379]: DEBUG nova.policy [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '991a93509b8943a693859488a56352b3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '928a9d102f0e45b897eae72fa566c0fe', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1799.907379] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88bbefc9-e46f-4284-b974-d6f2e7154864 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.915569] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f862845-2b95-4778-bdd0-fb189b3917f9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.945512] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c341a566-72ca-4e6c-a748-222dd4fe3f97 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.953413] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a2c4ee7-ee30-4225-892e-1892ab3df945 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.968440] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 8877e0f7-091b-4a91-bb5c-fb7733e5f70c] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1799.976024] env[63379]: DEBUG nova.compute.provider_tree [None req-67a48870-7e35-4c65-a7be-8e4f17190e64 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1799.978893] env[63379]: INFO nova.compute.manager [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 
tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Took 19.32 seconds to build instance. [ 1799.993381] env[63379]: DEBUG oslo_vmware.api [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]521e03bd-6bc7-024c-50bc-9b20a376c809, 'name': SearchDatastore_Task, 'duration_secs': 0.072374} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1799.994148] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b898e242-a685-4e3e-b759-95e39b2567c9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.000331] env[63379]: DEBUG oslo_vmware.api [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1800.000331] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a00e9e-26d3-a061-b77b-b99dc1860dcc" [ 1800.000331] env[63379]: _type = "Task" [ 1800.000331] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1800.009849] env[63379]: DEBUG oslo_vmware.api [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a00e9e-26d3-a061-b77b-b99dc1860dcc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.072896] env[63379]: DEBUG nova.network.neutron [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Successfully created port: 141d6d25-1ead-4801-ad93-5aaf2b65562c {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1800.111992] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a8071726-5b1e-4b8e-a419-a4adbb5da23a tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "9040201c-e1de-47d9-b9c2-b30c14e32749" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1800.112316] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a8071726-5b1e-4b8e-a419-a4adbb5da23a tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "9040201c-e1de-47d9-b9c2-b30c14e32749" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1800.112536] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a8071726-5b1e-4b8e-a419-a4adbb5da23a tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "9040201c-e1de-47d9-b9c2-b30c14e32749-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1800.112724] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a8071726-5b1e-4b8e-a419-a4adbb5da23a tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "9040201c-e1de-47d9-b9c2-b30c14e32749-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1800.112904] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a8071726-5b1e-4b8e-a419-a4adbb5da23a tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "9040201c-e1de-47d9-b9c2-b30c14e32749-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1800.116282] env[63379]: INFO nova.compute.manager [None req-a8071726-5b1e-4b8e-a419-a4adbb5da23a tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Terminating instance [ 1800.118307] env[63379]: DEBUG nova.compute.manager [None req-a8071726-5b1e-4b8e-a419-a4adbb5da23a tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1800.118516] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a8071726-5b1e-4b8e-a419-a4adbb5da23a tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1800.119421] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d30ecb61-9b35-4256-b0e1-6b30de042149 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.128078] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a8071726-5b1e-4b8e-a419-a4adbb5da23a tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1800.128235] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9d1af822-dda2-4c3e-93b1-46c22bd18990 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.159231] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1800.236188] env[63379]: DEBUG nova.compute.manager [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Start building block device mappings for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1800.242092] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a8071726-5b1e-4b8e-a419-a4adbb5da23a tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1800.242333] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a8071726-5b1e-4b8e-a419-a4adbb5da23a tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1800.242518] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8071726-5b1e-4b8e-a419-a4adbb5da23a tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Deleting the datastore file [datastore1] 9040201c-e1de-47d9-b9c2-b30c14e32749 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1800.242778] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-14184dab-a86d-4472-8ba1-c03192bfee92 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.249998] env[63379]: DEBUG oslo_vmware.api [None req-a8071726-5b1e-4b8e-a419-a4adbb5da23a tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1800.249998] env[63379]: value = "task-1780078" [ 1800.249998] env[63379]: _type = "Task" [ 1800.249998] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1800.259095] env[63379]: DEBUG oslo_vmware.api [None req-a8071726-5b1e-4b8e-a419-a4adbb5da23a tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780078, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.476928] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: b9bc2562-9475-400e-9cf9-646b8f4c8cf2] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1800.481270] env[63379]: DEBUG nova.scheduler.client.report [None req-67a48870-7e35-4c65-a7be-8e4f17190e64 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1800.487971] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0a05799a-8b23-466a-83b3-a0a0b7e79270 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "8078bac6-146a-4e3a-a7a7-7093f617a330" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.834s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1800.516390] env[63379]: DEBUG oslo_vmware.api [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a00e9e-26d3-a061-b77b-b99dc1860dcc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.641513] env[63379]: DEBUG nova.compute.manager [req-0d67804a-ed3c-4855-bf3d-d8d055c71a8d req-723fcbc0-d995-44bb-8abd-a1c3a2e4a235 service nova] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Received event network-changed-c2313903-6e4e-42f8-be0f-3c00be1c0fec {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1800.641711] env[63379]: DEBUG nova.compute.manager [req-0d67804a-ed3c-4855-bf3d-d8d055c71a8d req-723fcbc0-d995-44bb-8abd-a1c3a2e4a235 service nova] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Refreshing instance network info cache due to event network-changed-c2313903-6e4e-42f8-be0f-3c00be1c0fec. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1800.641932] env[63379]: DEBUG oslo_concurrency.lockutils [req-0d67804a-ed3c-4855-bf3d-d8d055c71a8d req-723fcbc0-d995-44bb-8abd-a1c3a2e4a235 service nova] Acquiring lock "refresh_cache-8078bac6-146a-4e3a-a7a7-7093f617a330" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1800.642091] env[63379]: DEBUG oslo_concurrency.lockutils [req-0d67804a-ed3c-4855-bf3d-d8d055c71a8d req-723fcbc0-d995-44bb-8abd-a1c3a2e4a235 service nova] Acquired lock "refresh_cache-8078bac6-146a-4e3a-a7a7-7093f617a330" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1800.642257] env[63379]: DEBUG nova.network.neutron [req-0d67804a-ed3c-4855-bf3d-d8d055c71a8d req-723fcbc0-d995-44bb-8abd-a1c3a2e4a235 service nova] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Refreshing network info cache for port c2313903-6e4e-42f8-be0f-3c00be1c0fec {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1800.759999] env[63379]: DEBUG oslo_vmware.api [None req-a8071726-5b1e-4b8e-a419-a4adbb5da23a tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780078, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.980611] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 6f3283be-cbc0-41f1-ac2b-554b2a6b9fcf] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1800.990179] env[63379]: DEBUG oslo_concurrency.lockutils [None req-67a48870-7e35-4c65-a7be-8e4f17190e64 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.762s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1800.993440] env[63379]: DEBUG oslo_concurrency.lockutils [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.252s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1800.995868] env[63379]: INFO nova.compute.claims [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: f99bad46-931d-497a-8586-b140309b0b45] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1801.020163] env[63379]: DEBUG oslo_vmware.api [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a00e9e-26d3-a061-b77b-b99dc1860dcc, 'name': SearchDatastore_Task, 'duration_secs': 0.906371} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.021841] env[63379]: INFO nova.scheduler.client.report [None req-67a48870-7e35-4c65-a7be-8e4f17190e64 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Deleted allocations for instance 19a41941-0679-4971-8a44-c95b13f5c294 [ 1801.026022] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1801.026371] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 9faef8ba-2263-4af8-ba5b-13a17b4275b6/9faef8ba-2263-4af8-ba5b-13a17b4275b6.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1801.026843] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-43d827bb-413a-4b70-81c3-9dffbdfc0d0d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.035973] env[63379]: DEBUG oslo_vmware.api [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1801.035973] env[63379]: value = "task-1780079" [ 1801.035973] env[63379]: _type = "Task" [ 1801.035973] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1801.044656] env[63379]: DEBUG oslo_vmware.api [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780079, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.247281] env[63379]: DEBUG nova.compute.manager [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1801.263518] env[63379]: DEBUG oslo_vmware.api [None req-a8071726-5b1e-4b8e-a419-a4adbb5da23a tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780078, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.788942} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.263929] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8071726-5b1e-4b8e-a419-a4adbb5da23a tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1801.264073] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a8071726-5b1e-4b8e-a419-a4adbb5da23a tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1801.264218] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a8071726-5b1e-4b8e-a419-a4adbb5da23a tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1801.264484] env[63379]: INFO nova.compute.manager [None req-a8071726-5b1e-4b8e-a419-a4adbb5da23a tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1801.264751] env[63379]: DEBUG oslo.service.loopingcall [None req-a8071726-5b1e-4b8e-a419-a4adbb5da23a tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1801.266869] env[63379]: DEBUG nova.compute.manager [-] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1801.267014] env[63379]: DEBUG nova.network.neutron [-] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1801.278574] env[63379]: DEBUG nova.virt.hardware [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1801.279134] env[63379]: DEBUG nova.virt.hardware [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1801.279134] env[63379]: DEBUG nova.virt.hardware [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1801.279353] env[63379]: DEBUG nova.virt.hardware [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1801.279510] env[63379]: DEBUG nova.virt.hardware [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1801.279695] env[63379]: DEBUG nova.virt.hardware [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1801.279942] env[63379]: DEBUG nova.virt.hardware [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1801.280220] env[63379]: DEBUG nova.virt.hardware [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1801.280358] env[63379]: DEBUG nova.virt.hardware [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1801.280548] env[63379]: DEBUG nova.virt.hardware [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1801.280749] env[63379]: DEBUG nova.virt.hardware [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1801.281739] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9f19542-13c6-401e-9a5f-4543fba03029 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.293268] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa932a76-b72d-4eb4-a711-e9c342b9604a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.484787] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: fad7a2dd-291f-4105-95a6-56bdbcc7acb4] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1801.532864] env[63379]: DEBUG oslo_concurrency.lockutils [None req-67a48870-7e35-4c65-a7be-8e4f17190e64 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Lock "19a41941-0679-4971-8a44-c95b13f5c294" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.103s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1801.537140] env[63379]: DEBUG nova.network.neutron [req-0d67804a-ed3c-4855-bf3d-d8d055c71a8d req-723fcbc0-d995-44bb-8abd-a1c3a2e4a235 service nova] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Updated VIF entry in instance network info cache for port c2313903-6e4e-42f8-be0f-3c00be1c0fec. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1801.537244] env[63379]: DEBUG nova.network.neutron [req-0d67804a-ed3c-4855-bf3d-d8d055c71a8d req-723fcbc0-d995-44bb-8abd-a1c3a2e4a235 service nova] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Updating instance_info_cache with network_info: [{"id": "c2313903-6e4e-42f8-be0f-3c00be1c0fec", "address": "fa:16:3e:bc:e7:16", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2313903-6e", "ovs_interfaceid": "c2313903-6e4e-42f8-be0f-3c00be1c0fec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1801.550024] env[63379]: DEBUG oslo_vmware.api [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780079, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.633522] env[63379]: DEBUG nova.compute.manager [req-ea21337e-39c7-4ab1-ba5a-8f637a238349 req-b6a1e193-2b79-4c51-b6d0-740c4816d23b service nova] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Received event network-vif-plugged-141d6d25-1ead-4801-ad93-5aaf2b65562c {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1801.633897] env[63379]: DEBUG oslo_concurrency.lockutils [req-ea21337e-39c7-4ab1-ba5a-8f637a238349 req-b6a1e193-2b79-4c51-b6d0-740c4816d23b service nova] Acquiring lock "62494fa1-5990-490d-92ae-00607d7ebba1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1801.633970] env[63379]: DEBUG oslo_concurrency.lockutils [req-ea21337e-39c7-4ab1-ba5a-8f637a238349 req-b6a1e193-2b79-4c51-b6d0-740c4816d23b service nova] Lock "62494fa1-5990-490d-92ae-00607d7ebba1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1801.634297] env[63379]: DEBUG oslo_concurrency.lockutils [req-ea21337e-39c7-4ab1-ba5a-8f637a238349 req-b6a1e193-2b79-4c51-b6d0-740c4816d23b service nova] Lock "62494fa1-5990-490d-92ae-00607d7ebba1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1801.634487] env[63379]: DEBUG nova.compute.manager [req-ea21337e-39c7-4ab1-ba5a-8f637a238349 req-b6a1e193-2b79-4c51-b6d0-740c4816d23b service nova] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] No waiting events found dispatching network-vif-plugged-141d6d25-1ead-4801-ad93-5aaf2b65562c {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1801.634657] env[63379]: WARNING nova.compute.manager [req-ea21337e-39c7-4ab1-ba5a-8f637a238349 req-b6a1e193-2b79-4c51-b6d0-740c4816d23b service nova] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Received unexpected event network-vif-plugged-141d6d25-1ead-4801-ad93-5aaf2b65562c for instance with vm_state building and task_state spawning. 
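The records just above show the compute manager's external-event handling: Neutron reports network-vif-plugged-141d6d25-1ead-4801-ad93-5aaf2b65562c, the manager takes the per-instance "62494fa1-...-events" lock, looks for a registered waiter via pop_instance_event, finds none ("No waiting events found dispatching ..."), and logs the event as unexpected because the instance is still in vm_state building / task_state spawning. The following is a minimal, illustrative sketch of that dispatch pattern using only the Python standard library; the class and method names are modeled on what appears in the log, not copied from Nova's actual implementation.

    # Sketch of per-instance external-event dispatch, assumed/simplified from the log above.
    import threading

    class InstanceEvents:
        def __init__(self):
            self._lock = threading.Lock()   # plays the role of the "<uuid>-events" lock in the log
            self._waiters = {}              # {(instance_uuid, event_name): threading.Event}

        def prepare_for_event(self, instance_uuid, event_name):
            # Register a waiter before starting the operation that triggers the event
            # (e.g. before asking Neutron to create/plug the port).
            waiter = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = waiter
            return waiter

        def pop_instance_event(self, instance_uuid, event_name):
            # Return the registered waiter, or None if nobody is waiting yet
            # (the "unexpected event" case seen in the WARNING above).
            with self._lock:
                return self._waiters.pop((instance_uuid, event_name), None)

    def external_instance_event(events, instance_uuid, event_name):
        waiter = events.pop_instance_event(instance_uuid, event_name)
        if waiter is None:
            print("Received unexpected event %s for instance %s" % (event_name, instance_uuid))
        else:
            waiter.set()

    # Usage (hypothetical): the spawn path calls prepare_for_event() before creating the port
    # and then waits on the returned Event; the Neutron notification handler calls
    # external_instance_event(), which either wakes the waiter or logs the event as unexpected.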
[ 1801.731834] env[63379]: DEBUG nova.network.neutron [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Successfully updated port: 141d6d25-1ead-4801-ad93-5aaf2b65562c {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1801.988328] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 861cda26-f938-4b2e-ba3d-56b8469b6034] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1802.009631] env[63379]: DEBUG nova.network.neutron [-] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1802.048417] env[63379]: DEBUG oslo_concurrency.lockutils [req-0d67804a-ed3c-4855-bf3d-d8d055c71a8d req-723fcbc0-d995-44bb-8abd-a1c3a2e4a235 service nova] Releasing lock "refresh_cache-8078bac6-146a-4e3a-a7a7-7093f617a330" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1802.057245] env[63379]: DEBUG oslo_vmware.api [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780079, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.187031] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d79bf73-7b9a-4f40-b7d4-8f68132b6f25 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.195783] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aca68cd-2aaf-412f-aa3b-c05c242e48a1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.225811] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cec3fe7-75f7-485e-9904-814375e4f22f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.233833] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc98330d-1b43-4125-abd8-e2e1816d9616 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.239888] env[63379]: DEBUG oslo_concurrency.lockutils [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "refresh_cache-62494fa1-5990-490d-92ae-00607d7ebba1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1802.240044] env[63379]: DEBUG oslo_concurrency.lockutils [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquired lock "refresh_cache-62494fa1-5990-490d-92ae-00607d7ebba1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1802.240195] env[63379]: DEBUG nova.network.neutron [None 
req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1802.251262] env[63379]: DEBUG nova.compute.provider_tree [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1802.495478] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 8b33e64a-ea19-4974-8c2d-350615b1e061] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1802.512267] env[63379]: INFO nova.compute.manager [-] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Took 1.25 seconds to deallocate network for instance. [ 1802.548452] env[63379]: DEBUG oslo_vmware.api [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780079, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.297872} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1802.549299] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 9faef8ba-2263-4af8-ba5b-13a17b4275b6/9faef8ba-2263-4af8-ba5b-13a17b4275b6.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1802.549525] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1802.549783] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-413db6f6-0794-4381-b421-7166384f3c55 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.556691] env[63379]: DEBUG oslo_vmware.api [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1802.556691] env[63379]: value = "task-1780081" [ 1802.556691] env[63379]: _type = "Task" [ 1802.556691] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1802.564684] env[63379]: DEBUG oslo_vmware.api [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780081, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.666574] env[63379]: DEBUG nova.compute.manager [req-92bba8b4-3eba-4743-9bb7-ef6fcc899148 req-54fbfcbe-f691-49a3-b733-e26661a59755 service nova] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Received event network-vif-deleted-ec9d8d4a-76e2-4945-ac09-1e225b358218 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1802.754681] env[63379]: DEBUG nova.scheduler.client.report [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1802.776097] env[63379]: DEBUG nova.network.neutron [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1802.905578] env[63379]: DEBUG nova.network.neutron [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Updating instance_info_cache with network_info: [{"id": "141d6d25-1ead-4801-ad93-5aaf2b65562c", "address": "fa:16:3e:72:7f:85", "network": {"id": "f43cdd88-dc3a-4cc6-af5d-da244f472d78", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-715557899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "928a9d102f0e45b897eae72fa566c0fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23fc30ea-1f06-424d-86e1-27ae5435b1a9", "external-id": "nsx-vlan-transportzone-189", "segmentation_id": 189, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap141d6d25-1e", "ovs_interfaceid": "141d6d25-1ead-4801-ad93-5aaf2b65562c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1802.998910] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 7687aaa1-d1a0-4d0d-a6b4-47c454fe3655] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1803.020277] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a8071726-5b1e-4b8e-a419-a4adbb5da23a 
tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1803.067050] env[63379]: DEBUG oslo_vmware.api [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780081, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065502} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1803.067274] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1803.068075] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ee95215-8c18-4d4d-8dc7-f804a577dcbf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.091268] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] 9faef8ba-2263-4af8-ba5b-13a17b4275b6/9faef8ba-2263-4af8-ba5b-13a17b4275b6.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1803.091881] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2b3c49e9-699c-481b-a2ae-994d45c054c9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.112315] env[63379]: DEBUG oslo_vmware.api [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1803.112315] env[63379]: value = "task-1780082" [ 1803.112315] env[63379]: _type = "Task" [ 1803.112315] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1803.120532] env[63379]: DEBUG oslo_vmware.api [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780082, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1803.260039] env[63379]: DEBUG oslo_concurrency.lockutils [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.266s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1803.260383] env[63379]: DEBUG nova.compute.manager [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: f99bad46-931d-497a-8586-b140309b0b45] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1803.263173] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 3.104s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1803.263294] env[63379]: DEBUG nova.objects.instance [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63379) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1803.408686] env[63379]: DEBUG oslo_concurrency.lockutils [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Releasing lock "refresh_cache-62494fa1-5990-490d-92ae-00607d7ebba1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1803.409112] env[63379]: DEBUG nova.compute.manager [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Instance network_info: |[{"id": "141d6d25-1ead-4801-ad93-5aaf2b65562c", "address": "fa:16:3e:72:7f:85", "network": {"id": "f43cdd88-dc3a-4cc6-af5d-da244f472d78", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-715557899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "928a9d102f0e45b897eae72fa566c0fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23fc30ea-1f06-424d-86e1-27ae5435b1a9", "external-id": "nsx-vlan-transportzone-189", "segmentation_id": 189, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap141d6d25-1e", "ovs_interfaceid": "141d6d25-1ead-4801-ad93-5aaf2b65562c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1803.409641] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:72:7f:85', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '23fc30ea-1f06-424d-86e1-27ae5435b1a9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '141d6d25-1ead-4801-ad93-5aaf2b65562c', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1803.426014] env[63379]: DEBUG oslo.service.loopingcall [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1803.426014] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1803.426014] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a8c0a1db-9335-4b3d-bc8b-8f2e1c47f4c4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.449964] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1803.449964] env[63379]: value = "task-1780083" [ 1803.449964] env[63379]: _type = "Task" [ 1803.449964] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1803.459982] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780083, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1803.504444] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: da66c3d9-ca03-4113-8703-64b666628936] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1803.623893] env[63379]: DEBUG oslo_vmware.api [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780082, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1803.660538] env[63379]: DEBUG nova.compute.manager [req-42102409-c43a-4531-a569-71e511a82695 req-5f09d184-31a4-467f-982f-e542b32a8389 service nova] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Received event network-changed-141d6d25-1ead-4801-ad93-5aaf2b65562c {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1803.660753] env[63379]: DEBUG nova.compute.manager [req-42102409-c43a-4531-a569-71e511a82695 req-5f09d184-31a4-467f-982f-e542b32a8389 service nova] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Refreshing instance network info cache due to event network-changed-141d6d25-1ead-4801-ad93-5aaf2b65562c. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1803.660976] env[63379]: DEBUG oslo_concurrency.lockutils [req-42102409-c43a-4531-a569-71e511a82695 req-5f09d184-31a4-467f-982f-e542b32a8389 service nova] Acquiring lock "refresh_cache-62494fa1-5990-490d-92ae-00607d7ebba1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1803.661198] env[63379]: DEBUG oslo_concurrency.lockutils [req-42102409-c43a-4531-a569-71e511a82695 req-5f09d184-31a4-467f-982f-e542b32a8389 service nova] Acquired lock "refresh_cache-62494fa1-5990-490d-92ae-00607d7ebba1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1803.661374] env[63379]: DEBUG nova.network.neutron [req-42102409-c43a-4531-a569-71e511a82695 req-5f09d184-31a4-467f-982f-e542b32a8389 service nova] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Refreshing network info cache for port 141d6d25-1ead-4801-ad93-5aaf2b65562c {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1803.767438] env[63379]: DEBUG nova.compute.utils [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1803.771746] env[63379]: DEBUG nova.compute.manager [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: f99bad46-931d-497a-8586-b140309b0b45] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1803.771934] env[63379]: DEBUG nova.network.neutron [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: f99bad46-931d-497a-8586-b140309b0b45] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1803.837928] env[63379]: DEBUG nova.policy [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c8248c5ed17044fdb5c07c0534d9354f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '734449c322394434a93a7b427d8ed7e8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1803.960186] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780083, 'name': CreateVM_Task, 'duration_secs': 0.381073} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1803.960415] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1803.961069] env[63379]: DEBUG oslo_concurrency.lockutils [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1803.961216] env[63379]: DEBUG oslo_concurrency.lockutils [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1803.961570] env[63379]: DEBUG oslo_concurrency.lockutils [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1803.961821] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-077d1db9-814f-4afc-93c6-ae8f1734565d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.966665] env[63379]: DEBUG oslo_vmware.api [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1803.966665] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e9aa39-02e5-ec95-521c-c6a2630e1b86" [ 1803.966665] env[63379]: _type = "Task" [ 1803.966665] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1803.974475] env[63379]: DEBUG oslo_vmware.api [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e9aa39-02e5-ec95-521c-c6a2630e1b86, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.006249] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 266cc3d5-c10d-4367-a879-d170802495db] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1804.124026] env[63379]: DEBUG oslo_vmware.api [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780082, 'name': ReconfigVM_Task, 'duration_secs': 0.715907} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1804.124824] env[63379]: DEBUG nova.network.neutron [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: f99bad46-931d-497a-8586-b140309b0b45] Successfully created port: 93473bbe-e8ba-4cf7-b6ad-8880f0752f3f {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1804.126796] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Reconfigured VM instance instance-0000005e to attach disk [datastore1] 9faef8ba-2263-4af8-ba5b-13a17b4275b6/9faef8ba-2263-4af8-ba5b-13a17b4275b6.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1804.127457] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-277080a1-77cc-40d0-af66-aec00d2b6839 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.134100] env[63379]: DEBUG oslo_vmware.api [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1804.134100] env[63379]: value = "task-1780084" [ 1804.134100] env[63379]: _type = "Task" [ 1804.134100] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1804.143463] env[63379]: DEBUG oslo_vmware.api [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780084, 'name': Rename_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.272665] env[63379]: DEBUG nova.compute.manager [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: f99bad46-931d-497a-8586-b140309b0b45] Start building block device mappings for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1804.278862] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bffe3f78-7ced-438a-94b0-44e4649915a3 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1804.280329] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a8071726-5b1e-4b8e-a419-a4adbb5da23a tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.260s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1804.280676] env[63379]: DEBUG nova.objects.instance [None req-a8071726-5b1e-4b8e-a419-a4adbb5da23a tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lazy-loading 'resources' on Instance uuid 9040201c-e1de-47d9-b9c2-b30c14e32749 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1804.369218] env[63379]: DEBUG oslo_concurrency.lockutils [None req-47fe0363-4c42-4dbd-9feb-fd3131e4b349 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquiring lock "19941838-d6b0-4fb8-9d06-f4a1b80ba428" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1804.369479] env[63379]: DEBUG oslo_concurrency.lockutils [None req-47fe0363-4c42-4dbd-9feb-fd3131e4b349 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "19941838-d6b0-4fb8-9d06-f4a1b80ba428" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1804.477564] env[63379]: DEBUG oslo_vmware.api [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e9aa39-02e5-ec95-521c-c6a2630e1b86, 'name': SearchDatastore_Task, 'duration_secs': 0.009621} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1804.477912] env[63379]: DEBUG oslo_concurrency.lockutils [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1804.478187] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1804.478438] env[63379]: DEBUG oslo_concurrency.lockutils [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1804.478587] env[63379]: DEBUG oslo_concurrency.lockutils [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1804.478768] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1804.479040] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-33a0e9fc-af50-418b-a472-c6b37ea4c3fe {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.487824] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1804.489015] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1804.489015] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dfd78841-5a5d-4f8f-aa04-c440cc4538b8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.494395] env[63379]: DEBUG oslo_vmware.api [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1804.494395] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f88a44-0744-e89b-6192-22ad520d904e" [ 1804.494395] env[63379]: _type = "Task" [ 1804.494395] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1804.502490] env[63379]: DEBUG oslo_vmware.api [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f88a44-0744-e89b-6192-22ad520d904e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.511794] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: fbbe64e4-eaa5-41d1-9a70-1aac44f4bc2f] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1804.645699] env[63379]: DEBUG oslo_vmware.api [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780084, 'name': Rename_Task, 'duration_secs': 0.297421} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1804.645966] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1804.646235] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8a3e736d-317e-4702-9ee8-e3ce67eab70b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.653079] env[63379]: DEBUG oslo_vmware.api [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1804.653079] env[63379]: value = "task-1780085" [ 1804.653079] env[63379]: _type = "Task" [ 1804.653079] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1804.663770] env[63379]: DEBUG oslo_vmware.api [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780085, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.773211] env[63379]: DEBUG nova.network.neutron [req-42102409-c43a-4531-a569-71e511a82695 req-5f09d184-31a4-467f-982f-e542b32a8389 service nova] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Updated VIF entry in instance network info cache for port 141d6d25-1ead-4801-ad93-5aaf2b65562c. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1804.773866] env[63379]: DEBUG nova.network.neutron [req-42102409-c43a-4531-a569-71e511a82695 req-5f09d184-31a4-467f-982f-e542b32a8389 service nova] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Updating instance_info_cache with network_info: [{"id": "141d6d25-1ead-4801-ad93-5aaf2b65562c", "address": "fa:16:3e:72:7f:85", "network": {"id": "f43cdd88-dc3a-4cc6-af5d-da244f472d78", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-715557899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "928a9d102f0e45b897eae72fa566c0fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23fc30ea-1f06-424d-86e1-27ae5435b1a9", "external-id": "nsx-vlan-transportzone-189", "segmentation_id": 189, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap141d6d25-1e", "ovs_interfaceid": "141d6d25-1ead-4801-ad93-5aaf2b65562c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1804.841590] env[63379]: INFO nova.compute.manager [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Rebuilding instance [ 1804.877081] env[63379]: DEBUG nova.compute.utils [None req-47fe0363-4c42-4dbd-9feb-fd3131e4b349 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1804.882520] env[63379]: DEBUG nova.compute.manager [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1804.883972] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d1d11ba-198a-4c19-a62e-0c7f78234c5f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.959732] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36b77313-dc7d-465b-bbf0-db2f42f54738 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.967096] env[63379]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1af5bf77-1614-45fc-9b0f-574e93817c21 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.001135] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cb58d91-0227-451f-8308-1adc4e241096 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.012336] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bccdf4b8-55d3-4a2d-b59a-07d97d08278d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.016277] env[63379]: DEBUG oslo_vmware.api [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f88a44-0744-e89b-6192-22ad520d904e, 'name': SearchDatastore_Task, 'duration_secs': 0.008841} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1805.017587] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: c1858f41-75e7-4eee-a6db-493e150622ef] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1805.020625] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-57b5418b-de9e-49d3-8751-1bfb01c00846 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.032280] env[63379]: DEBUG nova.compute.provider_tree [None req-a8071726-5b1e-4b8e-a419-a4adbb5da23a tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1805.036586] env[63379]: DEBUG oslo_vmware.api [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1805.036586] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52fa5078-77ed-e0f5-a6ad-bc3edb41048d" [ 1805.036586] env[63379]: _type = "Task" [ 1805.036586] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1805.045291] env[63379]: DEBUG oslo_vmware.api [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52fa5078-77ed-e0f5-a6ad-bc3edb41048d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.163694] env[63379]: DEBUG oslo_vmware.api [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780085, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.277194] env[63379]: DEBUG oslo_concurrency.lockutils [req-42102409-c43a-4531-a569-71e511a82695 req-5f09d184-31a4-467f-982f-e542b32a8389 service nova] Releasing lock "refresh_cache-62494fa1-5990-490d-92ae-00607d7ebba1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1805.284587] env[63379]: DEBUG nova.compute.manager [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: f99bad46-931d-497a-8586-b140309b0b45] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1805.309201] env[63379]: DEBUG nova.virt.hardware [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1805.309464] env[63379]: DEBUG nova.virt.hardware [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1805.309627] env[63379]: DEBUG nova.virt.hardware [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1805.309816] env[63379]: DEBUG nova.virt.hardware [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1805.309970] env[63379]: DEBUG nova.virt.hardware [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1805.310141] env[63379]: DEBUG nova.virt.hardware [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1805.310357] 
env[63379]: DEBUG nova.virt.hardware [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1805.310519] env[63379]: DEBUG nova.virt.hardware [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1805.310687] env[63379]: DEBUG nova.virt.hardware [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1805.310854] env[63379]: DEBUG nova.virt.hardware [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1805.311051] env[63379]: DEBUG nova.virt.hardware [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1805.311909] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-284859a0-52be-4321-b943-531a6ca22702 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.320197] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5445484-5aa8-4e69-8526-076c7a33a8eb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.383927] env[63379]: DEBUG oslo_concurrency.lockutils [None req-47fe0363-4c42-4dbd-9feb-fd3131e4b349 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "19941838-d6b0-4fb8-9d06-f4a1b80ba428" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.014s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1805.397079] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1805.397375] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-961d6f26-f42a-4f56-8660-05a32d5de161 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.404759] env[63379]: DEBUG oslo_vmware.api [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 
tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1805.404759] env[63379]: value = "task-1780086" [ 1805.404759] env[63379]: _type = "Task" [ 1805.404759] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1805.414585] env[63379]: DEBUG oslo_vmware.api [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780086, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.532560] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: f087b3ac-13e2-4e55-a3ce-5e6bd3379239] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1805.535764] env[63379]: DEBUG nova.scheduler.client.report [None req-a8071726-5b1e-4b8e-a419-a4adbb5da23a tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1805.549899] env[63379]: DEBUG oslo_vmware.api [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52fa5078-77ed-e0f5-a6ad-bc3edb41048d, 'name': SearchDatastore_Task, 'duration_secs': 0.00965} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1805.550887] env[63379]: DEBUG oslo_concurrency.lockutils [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1805.551217] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 62494fa1-5990-490d-92ae-00607d7ebba1/62494fa1-5990-490d-92ae-00607d7ebba1.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1805.551651] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c32b4844-9b9c-41ac-a292-33b1000977b6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.558554] env[63379]: DEBUG oslo_vmware.api [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1805.558554] env[63379]: value = "task-1780087" [ 1805.558554] env[63379]: _type = "Task" [ 1805.558554] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1805.566852] env[63379]: DEBUG oslo_vmware.api [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780087, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.664415] env[63379]: DEBUG oslo_vmware.api [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780085, 'name': PowerOnVM_Task, 'duration_secs': 0.887996} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1805.664830] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1805.665156] env[63379]: INFO nova.compute.manager [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Took 9.58 seconds to spawn the instance on the hypervisor. 
[ 1805.665432] env[63379]: DEBUG nova.compute.manager [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1805.666298] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c66b232-c711-4f8d-ac1a-420302e3a51b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.732357] env[63379]: DEBUG oslo_concurrency.lockutils [None req-aec56a88-5904-42bd-8346-e7a6842f6058 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquiring lock "3b662a31-76b9-4ac8-a6bd-bc4983f7fec9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1805.732679] env[63379]: DEBUG oslo_concurrency.lockutils [None req-aec56a88-5904-42bd-8346-e7a6842f6058 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Lock "3b662a31-76b9-4ac8-a6bd-bc4983f7fec9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1805.732937] env[63379]: DEBUG oslo_concurrency.lockutils [None req-aec56a88-5904-42bd-8346-e7a6842f6058 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquiring lock "3b662a31-76b9-4ac8-a6bd-bc4983f7fec9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1805.733183] env[63379]: DEBUG oslo_concurrency.lockutils [None req-aec56a88-5904-42bd-8346-e7a6842f6058 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Lock "3b662a31-76b9-4ac8-a6bd-bc4983f7fec9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1805.733403] env[63379]: DEBUG oslo_concurrency.lockutils [None req-aec56a88-5904-42bd-8346-e7a6842f6058 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Lock "3b662a31-76b9-4ac8-a6bd-bc4983f7fec9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1805.735653] env[63379]: INFO nova.compute.manager [None req-aec56a88-5904-42bd-8346-e7a6842f6058 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Terminating instance [ 1805.737481] env[63379]: DEBUG nova.compute.manager [None req-aec56a88-5904-42bd-8346-e7a6842f6058 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Start destroying the 
instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1805.737671] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-aec56a88-5904-42bd-8346-e7a6842f6058 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1805.738537] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5d38525-678a-40ab-b5ad-25a104f56a7a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.746466] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-aec56a88-5904-42bd-8346-e7a6842f6058 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1805.746688] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-deec2e74-f1bf-4c1a-ad23-b22be9247d71 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.753897] env[63379]: DEBUG oslo_vmware.api [None req-aec56a88-5904-42bd-8346-e7a6842f6058 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1805.753897] env[63379]: value = "task-1780088" [ 1805.753897] env[63379]: _type = "Task" [ 1805.753897] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1805.762280] env[63379]: DEBUG oslo_vmware.api [None req-aec56a88-5904-42bd-8346-e7a6842f6058 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1780088, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.915552] env[63379]: DEBUG oslo_vmware.api [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780086, 'name': PowerOffVM_Task, 'duration_secs': 0.325088} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1805.916335] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1805.916335] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1805.917449] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-756f6c59-80cd-4a9a-8483-b89185e9512c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.924033] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1805.924161] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d202c294-38f3-4d44-8752-c0c921884430 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.040760] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 36681a38-7cfd-44cf-8b8f-1f4dfb613f4f] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1806.043164] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a8071726-5b1e-4b8e-a419-a4adbb5da23a tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.763s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1806.056794] env[63379]: DEBUG nova.compute.manager [req-39a45306-809d-45fe-892b-b96709d074fa req-3828764d-6e72-463c-912b-3ef878022ccc service nova] [instance: f99bad46-931d-497a-8586-b140309b0b45] Received event network-vif-plugged-93473bbe-e8ba-4cf7-b6ad-8880f0752f3f {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1806.056915] env[63379]: DEBUG oslo_concurrency.lockutils [req-39a45306-809d-45fe-892b-b96709d074fa req-3828764d-6e72-463c-912b-3ef878022ccc service nova] Acquiring lock "f99bad46-931d-497a-8586-b140309b0b45-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1806.057278] env[63379]: DEBUG oslo_concurrency.lockutils [req-39a45306-809d-45fe-892b-b96709d074fa req-3828764d-6e72-463c-912b-3ef878022ccc service nova] Lock "f99bad46-931d-497a-8586-b140309b0b45-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1806.057502] env[63379]: DEBUG oslo_concurrency.lockutils [req-39a45306-809d-45fe-892b-b96709d074fa req-3828764d-6e72-463c-912b-3ef878022ccc service nova] Lock "f99bad46-931d-497a-8586-b140309b0b45-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1806.059677] env[63379]: DEBUG nova.compute.manager [req-39a45306-809d-45fe-892b-b96709d074fa req-3828764d-6e72-463c-912b-3ef878022ccc service nova] [instance: f99bad46-931d-497a-8586-b140309b0b45] No waiting events found dispatching network-vif-plugged-93473bbe-e8ba-4cf7-b6ad-8880f0752f3f {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1806.059677] env[63379]: WARNING nova.compute.manager [req-39a45306-809d-45fe-892b-b96709d074fa req-3828764d-6e72-463c-912b-3ef878022ccc service nova] [instance: f99bad46-931d-497a-8586-b140309b0b45] Received unexpected event network-vif-plugged-93473bbe-e8ba-4cf7-b6ad-8880f0752f3f for instance with vm_state building and task_state spawning. [ 1806.069356] env[63379]: DEBUG oslo_vmware.api [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780087, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.075777] env[63379]: INFO nova.scheduler.client.report [None req-a8071726-5b1e-4b8e-a419-a4adbb5da23a tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Deleted allocations for instance 9040201c-e1de-47d9-b9c2-b30c14e32749 [ 1806.185342] env[63379]: INFO nova.compute.manager [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Took 20.87 seconds to build instance. [ 1806.206523] env[63379]: DEBUG nova.network.neutron [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: f99bad46-931d-497a-8586-b140309b0b45] Successfully updated port: 93473bbe-e8ba-4cf7-b6ad-8880f0752f3f {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1806.265023] env[63379]: DEBUG oslo_vmware.api [None req-aec56a88-5904-42bd-8346-e7a6842f6058 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1780088, 'name': PowerOffVM_Task, 'duration_secs': 0.28747} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1806.265023] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-aec56a88-5904-42bd-8346-e7a6842f6058 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1806.265199] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-aec56a88-5904-42bd-8346-e7a6842f6058 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1806.265442] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d41da56b-d0d3-409b-bf05-0ffb3433722c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.443608] env[63379]: DEBUG oslo_concurrency.lockutils [None req-47fe0363-4c42-4dbd-9feb-fd3131e4b349 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquiring lock "19941838-d6b0-4fb8-9d06-f4a1b80ba428" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1806.447024] env[63379]: DEBUG oslo_concurrency.lockutils [None req-47fe0363-4c42-4dbd-9feb-fd3131e4b349 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "19941838-d6b0-4fb8-9d06-f4a1b80ba428" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1806.447024] env[63379]: INFO nova.compute.manager [None req-47fe0363-4c42-4dbd-9feb-fd3131e4b349 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Attaching volume e897c929-908c-41dd-b7b3-54172d033a9c to /dev/sdb [ 1806.485278] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c458d52b-4bf2-4204-b7bc-5171b1c9f755 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.493171] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c50943df-140b-4311-bbae-75efe34a1447 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.507286] env[63379]: DEBUG nova.virt.block_device [None req-47fe0363-4c42-4dbd-9feb-fd3131e4b349 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Updating existing volume attachment record: 57db9d31-e3fc-4caa-a19c-b9919dc3f70a {{(pid=63379) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1806.547591] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 5aad86f8-0b3b-43ca-982b-c670e3411c01] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes 
/opt/stack/nova/nova/compute/manager.py:11287}} [ 1806.569740] env[63379]: DEBUG oslo_vmware.api [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780087, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.583642] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a8071726-5b1e-4b8e-a419-a4adbb5da23a tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "9040201c-e1de-47d9-b9c2-b30c14e32749" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.471s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1806.615179] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-aec56a88-5904-42bd-8346-e7a6842f6058 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1806.615458] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-aec56a88-5904-42bd-8346-e7a6842f6058 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1806.615660] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-aec56a88-5904-42bd-8346-e7a6842f6058 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Deleting the datastore file [datastore1] 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1806.616520] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0d00dc60-8196-4081-a1da-c8832c750f0b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.619113] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1806.619388] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1806.619577] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Deleting the datastore file [datastore1] a7cce485-7476-4ea1-b127-68d879e164cd {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1806.619860] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-24cc769f-59c4-43b4-af1d-6b8686d0514d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.626125] env[63379]: DEBUG oslo_vmware.api [None req-aec56a88-5904-42bd-8346-e7a6842f6058 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1806.626125] env[63379]: value = "task-1780091" [ 1806.626125] env[63379]: _type = "Task" [ 1806.626125] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1806.627371] env[63379]: DEBUG oslo_vmware.api [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1806.627371] env[63379]: value = "task-1780092" [ 1806.627371] env[63379]: _type = "Task" [ 1806.627371] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1806.638575] env[63379]: DEBUG oslo_vmware.api [None req-aec56a88-5904-42bd-8346-e7a6842f6058 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1780091, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.641511] env[63379]: DEBUG oslo_vmware.api [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780092, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.687954] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5910ae3b-a693-4f2a-a1f3-a43763d4a5f9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "9faef8ba-2263-4af8-ba5b-13a17b4275b6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 22.382s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1806.706935] env[63379]: DEBUG oslo_concurrency.lockutils [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Acquiring lock "refresh_cache-f99bad46-931d-497a-8586-b140309b0b45" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1806.707126] env[63379]: DEBUG oslo_concurrency.lockutils [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Acquired lock "refresh_cache-f99bad46-931d-497a-8586-b140309b0b45" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1806.707295] env[63379]: DEBUG nova.network.neutron [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: f99bad46-931d-497a-8586-b140309b0b45] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1807.055063] env[63379]: DEBUG nova.compute.manager 
[None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: b91a5b89-0456-431d-b099-adda3a6b3024] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1807.074384] env[63379]: DEBUG oslo_vmware.api [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780087, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.139895] env[63379]: DEBUG oslo_vmware.api [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780092, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.142810] env[63379]: DEBUG oslo_vmware.api [None req-aec56a88-5904-42bd-8346-e7a6842f6058 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1780091, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.244490] env[63379]: DEBUG nova.network.neutron [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: f99bad46-931d-497a-8586-b140309b0b45] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1807.387782] env[63379]: DEBUG nova.network.neutron [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: f99bad46-931d-497a-8586-b140309b0b45] Updating instance_info_cache with network_info: [{"id": "93473bbe-e8ba-4cf7-b6ad-8880f0752f3f", "address": "fa:16:3e:fb:a8:06", "network": {"id": "55f3848c-4d4f-4c83-a3e6-bc7a6f7af3ce", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eb95d75934bc4912a35f709406a98a65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93473bbe-e8", "ovs_interfaceid": "93473bbe-e8ba-4cf7-b6ad-8880f0752f3f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1807.559958] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: acc8aa2f-41a8-4f06-8227-a1bae9c93f44] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes 
/opt/stack/nova/nova/compute/manager.py:11287}} [ 1807.568640] env[63379]: DEBUG nova.compute.manager [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Stashing vm_state: active {{(pid=63379) _prep_resize /opt/stack/nova/nova/compute/manager.py:5671}} [ 1807.577867] env[63379]: DEBUG oslo_vmware.api [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780087, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.636986] env[63379]: DEBUG oslo_vmware.api [None req-aec56a88-5904-42bd-8346-e7a6842f6058 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1780091, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.641698] env[63379]: DEBUG oslo_vmware.api [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780092, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.890944] env[63379]: DEBUG oslo_concurrency.lockutils [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Releasing lock "refresh_cache-f99bad46-931d-497a-8586-b140309b0b45" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1807.891106] env[63379]: DEBUG nova.compute.manager [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: f99bad46-931d-497a-8586-b140309b0b45] Instance network_info: |[{"id": "93473bbe-e8ba-4cf7-b6ad-8880f0752f3f", "address": "fa:16:3e:fb:a8:06", "network": {"id": "55f3848c-4d4f-4c83-a3e6-bc7a6f7af3ce", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eb95d75934bc4912a35f709406a98a65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93473bbe-e8", "ovs_interfaceid": "93473bbe-e8ba-4cf7-b6ad-8880f0752f3f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1807.891487] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 
tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: f99bad46-931d-497a-8586-b140309b0b45] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fb:a8:06', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea00b53a-9c9b-4592-ab95-7e10473f338d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '93473bbe-e8ba-4cf7-b6ad-8880f0752f3f', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1807.900164] env[63379]: DEBUG oslo.service.loopingcall [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1807.900744] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f99bad46-931d-497a-8586-b140309b0b45] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1807.901121] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-56eb32b1-d56b-4dcb-a486-b082071383af {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.924134] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1807.924134] env[63379]: value = "task-1780096" [ 1807.924134] env[63379]: _type = "Task" [ 1807.924134] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1807.932464] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780096, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.065166] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 5c4ae6c6-538a-4724-ad77-340d9c60c24a] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1808.081939] env[63379]: DEBUG oslo_vmware.api [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780087, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.087666] env[63379]: DEBUG nova.compute.manager [req-6467d016-9384-4922-8219-cdbdd803ff03 req-34400e71-eba1-485b-9bcb-2df8182a7cb8 service nova] [instance: f99bad46-931d-497a-8586-b140309b0b45] Received event network-changed-93473bbe-e8ba-4cf7-b6ad-8880f0752f3f {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1808.087767] env[63379]: DEBUG nova.compute.manager [req-6467d016-9384-4922-8219-cdbdd803ff03 req-34400e71-eba1-485b-9bcb-2df8182a7cb8 service nova] [instance: f99bad46-931d-497a-8586-b140309b0b45] Refreshing instance network info cache due to event network-changed-93473bbe-e8ba-4cf7-b6ad-8880f0752f3f. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1808.088082] env[63379]: DEBUG oslo_concurrency.lockutils [req-6467d016-9384-4922-8219-cdbdd803ff03 req-34400e71-eba1-485b-9bcb-2df8182a7cb8 service nova] Acquiring lock "refresh_cache-f99bad46-931d-497a-8586-b140309b0b45" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1808.088305] env[63379]: DEBUG oslo_concurrency.lockutils [req-6467d016-9384-4922-8219-cdbdd803ff03 req-34400e71-eba1-485b-9bcb-2df8182a7cb8 service nova] Acquired lock "refresh_cache-f99bad46-931d-497a-8586-b140309b0b45" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1808.089028] env[63379]: DEBUG nova.network.neutron [req-6467d016-9384-4922-8219-cdbdd803ff03 req-34400e71-eba1-485b-9bcb-2df8182a7cb8 service nova] [instance: f99bad46-931d-497a-8586-b140309b0b45] Refreshing network info cache for port 93473bbe-e8ba-4cf7-b6ad-8880f0752f3f {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1808.094763] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1808.095050] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1808.140826] env[63379]: DEBUG oslo_vmware.api [None req-aec56a88-5904-42bd-8346-e7a6842f6058 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1780091, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.144635] env[63379]: DEBUG oslo_vmware.api [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780092, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.434808] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780096, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.576070] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 8b07ef47-3615-41a5-acfd-87c1ad43b4b9] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1808.578204] env[63379]: DEBUG oslo_vmware.api [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780087, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.600237] env[63379]: INFO nova.compute.claims [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1808.641100] env[63379]: DEBUG oslo_vmware.api [None req-aec56a88-5904-42bd-8346-e7a6842f6058 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1780091, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.650337] env[63379]: DEBUG oslo_vmware.api [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780092, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.651050] env[63379]: DEBUG oslo_concurrency.lockutils [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "2be6bdea-416e-4912-8930-3c4e4f194f99" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1808.651050] env[63379]: DEBUG oslo_concurrency.lockutils [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "2be6bdea-416e-4912-8930-3c4e4f194f99" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1808.836401] env[63379]: DEBUG nova.network.neutron [req-6467d016-9384-4922-8219-cdbdd803ff03 req-34400e71-eba1-485b-9bcb-2df8182a7cb8 service nova] [instance: f99bad46-931d-497a-8586-b140309b0b45] Updated VIF entry in instance network info cache for port 93473bbe-e8ba-4cf7-b6ad-8880f0752f3f. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1808.836832] env[63379]: DEBUG nova.network.neutron [req-6467d016-9384-4922-8219-cdbdd803ff03 req-34400e71-eba1-485b-9bcb-2df8182a7cb8 service nova] [instance: f99bad46-931d-497a-8586-b140309b0b45] Updating instance_info_cache with network_info: [{"id": "93473bbe-e8ba-4cf7-b6ad-8880f0752f3f", "address": "fa:16:3e:fb:a8:06", "network": {"id": "55f3848c-4d4f-4c83-a3e6-bc7a6f7af3ce", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "eb95d75934bc4912a35f709406a98a65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea00b53a-9c9b-4592-ab95-7e10473f338d", "external-id": "nsx-vlan-transportzone-235", "segmentation_id": 235, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93473bbe-e8", "ovs_interfaceid": "93473bbe-e8ba-4cf7-b6ad-8880f0752f3f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1808.935145] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780096, 'name': CreateVM_Task, 'duration_secs': 0.623934} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1808.935383] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f99bad46-931d-497a-8586-b140309b0b45] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1808.936048] env[63379]: DEBUG oslo_concurrency.lockutils [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1808.936247] env[63379]: DEBUG oslo_concurrency.lockutils [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1808.936597] env[63379]: DEBUG oslo_concurrency.lockutils [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1808.936866] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77634a57-b5f0-457c-93a5-d20856e8dbf0 {{(pid=63379) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.941569] env[63379]: DEBUG oslo_vmware.api [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Waiting for the task: (returnval){ [ 1808.941569] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52023451-281e-45b9-53a3-cd901fa1cda8" [ 1808.941569] env[63379]: _type = "Task" [ 1808.941569] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1808.950091] env[63379]: DEBUG oslo_vmware.api [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52023451-281e-45b9-53a3-cd901fa1cda8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.075037] env[63379]: DEBUG oslo_vmware.api [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780087, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.080656] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 607f9774-0ffc-4ece-a7ba-419fdf6eb26b] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1809.106854] env[63379]: INFO nova.compute.resource_tracker [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Updating resource usage from migration 3b490a78-18fd-4882-b501-f1ff04f2cb79 [ 1809.140984] env[63379]: DEBUG oslo_vmware.api [None req-aec56a88-5904-42bd-8346-e7a6842f6058 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1780091, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.146335] env[63379]: DEBUG oslo_vmware.api [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780092, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.153359] env[63379]: DEBUG nova.compute.manager [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Starting instance... 
{{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1809.289734] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c659a9e-b879-40d3-84c4-b49f32ef535f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.296946] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cdcebd3-6d84-4e1b-ad2a-f103a6833f8f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.327778] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dfe63f6-957d-4c7c-ba6d-49cb4f269498 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.335115] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e54c8bc9-405f-4ac2-86ba-016e9fac11d0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.339209] env[63379]: DEBUG oslo_concurrency.lockutils [req-6467d016-9384-4922-8219-cdbdd803ff03 req-34400e71-eba1-485b-9bcb-2df8182a7cb8 service nova] Releasing lock "refresh_cache-f99bad46-931d-497a-8586-b140309b0b45" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1809.348957] env[63379]: DEBUG nova.compute.provider_tree [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1809.452220] env[63379]: DEBUG oslo_vmware.api [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52023451-281e-45b9-53a3-cd901fa1cda8, 'name': SearchDatastore_Task, 'duration_secs': 0.123767} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.452582] env[63379]: DEBUG oslo_concurrency.lockutils [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1809.452766] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: f99bad46-931d-497a-8586-b140309b0b45] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1809.453034] env[63379]: DEBUG oslo_concurrency.lockutils [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1809.453193] env[63379]: DEBUG oslo_concurrency.lockutils [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1809.453373] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1809.453660] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-48e4b32a-2fe9-4e8b-822f-da74a4e12e08 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.462192] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1809.462367] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1809.463040] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-128debbe-875b-41dc-8ca1-305bd7465859 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.468163] env[63379]: DEBUG oslo_vmware.api [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Waiting for the task: (returnval){ [ 1809.468163] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e0b268-2a2b-4793-68d3-ce24b1feaed8" [ 1809.468163] env[63379]: _type = "Task" [ 1809.468163] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.475441] env[63379]: DEBUG oslo_vmware.api [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e0b268-2a2b-4793-68d3-ce24b1feaed8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.576156] env[63379]: DEBUG oslo_vmware.api [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780087, 'name': CopyVirtualDisk_Task, 'duration_secs': 3.642892} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.576360] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 62494fa1-5990-490d-92ae-00607d7ebba1/62494fa1-5990-490d-92ae-00607d7ebba1.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1809.576582] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1809.576825] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0bfeaa91-088c-4f87-9ff3-3d81591fa1ca {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.583729] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: ac596f08-86a3-42e0-86e6-41a173fe868f] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1809.585626] env[63379]: DEBUG oslo_vmware.api [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1809.585626] env[63379]: value = "task-1780098" [ 1809.585626] env[63379]: _type = "Task" [ 
1809.585626] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.593463] env[63379]: DEBUG oslo_vmware.api [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780098, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.641766] env[63379]: DEBUG oslo_vmware.api [None req-aec56a88-5904-42bd-8346-e7a6842f6058 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1780091, 'name': DeleteDatastoreFile_Task, 'duration_secs': 2.704543} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.645352] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-aec56a88-5904-42bd-8346-e7a6842f6058 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1809.645458] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-aec56a88-5904-42bd-8346-e7a6842f6058 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1809.645631] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-aec56a88-5904-42bd-8346-e7a6842f6058 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1809.647291] env[63379]: INFO nova.compute.manager [None req-aec56a88-5904-42bd-8346-e7a6842f6058 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Took 3.91 seconds to destroy the instance on the hypervisor. [ 1809.647291] env[63379]: DEBUG oslo.service.loopingcall [None req-aec56a88-5904-42bd-8346-e7a6842f6058 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1809.647291] env[63379]: DEBUG oslo_vmware.api [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780092, 'name': DeleteDatastoreFile_Task, 'duration_secs': 2.652166} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.647291] env[63379]: DEBUG nova.compute.manager [-] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1809.647291] env[63379]: DEBUG nova.network.neutron [-] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1809.648356] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1809.648558] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1809.648751] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1809.674696] env[63379]: DEBUG oslo_concurrency.lockutils [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1809.852023] env[63379]: DEBUG nova.scheduler.client.report [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1809.978863] env[63379]: DEBUG oslo_vmware.api [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e0b268-2a2b-4793-68d3-ce24b1feaed8, 'name': SearchDatastore_Task, 'duration_secs': 0.008578} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.979643] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7f0ab47-3f30-48d6-b557-99702035ae18 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.984673] env[63379]: DEBUG oslo_vmware.api [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Waiting for the task: (returnval){ [ 1809.984673] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52ed0581-9537-7ba2-34d6-d7ee762625f2" [ 1809.984673] env[63379]: _type = "Task" [ 1809.984673] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.991946] env[63379]: DEBUG oslo_vmware.api [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52ed0581-9537-7ba2-34d6-d7ee762625f2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.090408] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 2a996f06-542e-4f71-95a4-0f71097d1478] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1810.099323] env[63379]: DEBUG oslo_vmware.api [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780098, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063996} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1810.099609] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1810.100838] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-573c9ef2-4b46-4dda-ab7e-84ab11619204 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.125132] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] 62494fa1-5990-490d-92ae-00607d7ebba1/62494fa1-5990-490d-92ae-00607d7ebba1.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1810.126704] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-53e25efd-cba0-4984-92cc-5c05e7c44603 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.141772] env[63379]: DEBUG nova.compute.manager [req-d5d1ee25-01e0-4f51-b974-63d51bc78e4d req-b8eb74c3-bd7f-4e8b-9d74-568ff4253bf7 service nova] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Received event network-vif-deleted-3d75d6b5-820e-43f4-b349-f7d9d2137fee {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1810.141979] env[63379]: INFO nova.compute.manager [req-d5d1ee25-01e0-4f51-b974-63d51bc78e4d req-b8eb74c3-bd7f-4e8b-9d74-568ff4253bf7 service nova] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Neutron deleted interface 3d75d6b5-820e-43f4-b349-f7d9d2137fee; detaching it from the instance and deleting it from the info cache [ 1810.142174] env[63379]: DEBUG nova.network.neutron [req-d5d1ee25-01e0-4f51-b974-63d51bc78e4d req-b8eb74c3-bd7f-4e8b-9d74-568ff4253bf7 service nova] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1810.151016] env[63379]: DEBUG oslo_vmware.api [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1810.151016] env[63379]: value = "task-1780099" [ 1810.151016] env[63379]: _type = "Task" [ 1810.151016] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1810.164223] env[63379]: DEBUG oslo_vmware.api [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780099, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.357033] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.261s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1810.357033] env[63379]: INFO nova.compute.manager [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Migrating [ 1810.364236] env[63379]: DEBUG oslo_concurrency.lockutils [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.690s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1810.365796] env[63379]: INFO nova.compute.claims [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1810.381625] env[63379]: DEBUG nova.network.neutron [-] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1810.495046] env[63379]: DEBUG oslo_vmware.api [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52ed0581-9537-7ba2-34d6-d7ee762625f2, 'name': SearchDatastore_Task, 'duration_secs': 0.0094} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1810.495338] env[63379]: DEBUG oslo_concurrency.lockutils [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1810.495655] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] f99bad46-931d-497a-8586-b140309b0b45/f99bad46-931d-497a-8586-b140309b0b45.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1810.495844] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a4af3e76-5a6b-4fdd-9e55-4b21a9197063 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.502348] env[63379]: DEBUG oslo_vmware.api [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Waiting for the task: (returnval){ [ 1810.502348] env[63379]: value = "task-1780100" [ 1810.502348] env[63379]: _type = "Task" [ 1810.502348] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1810.509985] env[63379]: DEBUG oslo_vmware.api [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Task: {'id': task-1780100, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.594227] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: ec1f7a44-7344-43fb-9d51-688731d8ce14] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1810.644259] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7377fe4d-e1ef-4757-b0e7-6b2631618e65 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.655358] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12fcd355-c395-45af-bf1f-ee132a248a15 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.679533] env[63379]: DEBUG oslo_vmware.api [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780099, 'name': ReconfigVM_Task, 'duration_secs': 0.287309} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1810.679882] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Reconfigured VM instance instance-0000005f to attach disk [datastore1] 62494fa1-5990-490d-92ae-00607d7ebba1/62494fa1-5990-490d-92ae-00607d7ebba1.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1810.680566] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-df39cf6a-124a-4239-b0c5-93fd7e8385b5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.694109] env[63379]: DEBUG nova.compute.manager [req-d5d1ee25-01e0-4f51-b974-63d51bc78e4d req-b8eb74c3-bd7f-4e8b-9d74-568ff4253bf7 service nova] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Detach interface failed, port_id=3d75d6b5-820e-43f4-b349-f7d9d2137fee, reason: Instance 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9 could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 1810.698080] env[63379]: DEBUG oslo_vmware.api [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1810.698080] env[63379]: value = "task-1780101" [ 1810.698080] env[63379]: _type = "Task" [ 1810.698080] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1810.705327] env[63379]: DEBUG nova.virt.hardware [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1810.705604] env[63379]: DEBUG nova.virt.hardware [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1810.705772] env[63379]: DEBUG nova.virt.hardware [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1810.705961] env[63379]: DEBUG nova.virt.hardware [None 
req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1810.706129] env[63379]: DEBUG nova.virt.hardware [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1810.706284] env[63379]: DEBUG nova.virt.hardware [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1810.706496] env[63379]: DEBUG nova.virt.hardware [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1810.706659] env[63379]: DEBUG nova.virt.hardware [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1810.706831] env[63379]: DEBUG nova.virt.hardware [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1810.707077] env[63379]: DEBUG nova.virt.hardware [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1810.707278] env[63379]: DEBUG nova.virt.hardware [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1810.708174] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ae03b70-d61a-4897-a5e3-201b79f240a1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.714892] env[63379]: DEBUG oslo_vmware.api [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780101, 'name': Rename_Task} progress is 10%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.720465] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8a910ab-36e2-4ad1-a170-a204e8a4f149 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.735421] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f3:83:85', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c6934071-bf85-4591-9c7d-55c7ea131262', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bf9adade-286a-4e50-a0a5-a80cd17209c6', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1810.743664] env[63379]: DEBUG oslo.service.loopingcall [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1810.744145] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1810.744365] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5d51a3e9-dab0-4b15-827e-c89aff134bef {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.765157] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1810.765157] env[63379]: value = "task-1780102" [ 1810.765157] env[63379]: _type = "Task" [ 1810.765157] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1810.776687] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780102, 'name': CreateVM_Task} progress is 0%. 
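Each "Invoking <something>_Task" entry above is followed by wait_for_task polling until the task reports success, which is what the recurring "progress is N%" lines record. A simplified sketch of that poll-and-wait shape; it is not the oslo.vmware implementation, and get_task_info is a hypothetical stand-in for whatever reads TaskInfo back from vCenter.

import time

class TaskFailed(Exception):
    pass

def wait_for_vcenter_task(get_task_info, task_ref, poll_interval=0.5):
    # Illustrative poll loop only. get_task_info(task_ref) is assumed to return
    # an object with .state ('queued'/'running'/'success'/'error'), .progress
    # and .error, mirroring the TaskInfo fields behind "progress is N%".
    while True:
        info = get_task_info(task_ref)
        if info.state == "success":
            return info
        if info.state == "error":
            raise TaskFailed(info.error)
        print("Task %s: progress is %s%%" % (task_ref, info.progress or 0))
        time.sleep(poll_interval)

# Demo with a fake task source.
class _Info:
    def __init__(self, state, progress=0, error=None):
        self.state, self.progress, self.error = state, progress, error

_states = iter([_Info("running", 10), _Info("running", 66), _Info("success", 100)])
result = wait_for_vcenter_task(lambda ref: next(_states), "task-1780102", poll_interval=0)
print("final state:", result.state)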
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.879327] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "refresh_cache-9faef8ba-2263-4af8-ba5b-13a17b4275b6" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1810.879566] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquired lock "refresh_cache-9faef8ba-2263-4af8-ba5b-13a17b4275b6" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1810.879752] env[63379]: DEBUG nova.network.neutron [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1810.883805] env[63379]: INFO nova.compute.manager [-] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Took 1.24 seconds to deallocate network for instance. [ 1811.013603] env[63379]: DEBUG oslo_vmware.api [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Task: {'id': task-1780100, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.055195] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-47fe0363-4c42-4dbd-9feb-fd3131e4b349 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Volume attach. 
Driver type: vmdk {{(pid=63379) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1811.055450] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-47fe0363-4c42-4dbd-9feb-fd3131e4b349 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369475', 'volume_id': 'e897c929-908c-41dd-b7b3-54172d033a9c', 'name': 'volume-e897c929-908c-41dd-b7b3-54172d033a9c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '19941838-d6b0-4fb8-9d06-f4a1b80ba428', 'attached_at': '', 'detached_at': '', 'volume_id': 'e897c929-908c-41dd-b7b3-54172d033a9c', 'serial': 'e897c929-908c-41dd-b7b3-54172d033a9c'} {{(pid=63379) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1811.056458] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8f0dd88-a1eb-4f5a-bc20-71824b55658e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.074850] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c6640d2-5081-4152-a2d3-850dd76461fc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.102137] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-47fe0363-4c42-4dbd-9feb-fd3131e4b349 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Reconfiguring VM instance instance-0000004f to attach disk [datastore1] volume-e897c929-908c-41dd-b7b3-54172d033a9c/volume-e897c929-908c-41dd-b7b3-54172d033a9c.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1811.102696] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 38be0e8d-188b-4a98-aedc-5d941b63c000] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1811.104607] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5f188c1f-06f7-4f4f-919e-7032aa8d0a34 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.124126] env[63379]: DEBUG oslo_vmware.api [None req-47fe0363-4c42-4dbd-9feb-fd3131e4b349 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 1811.124126] env[63379]: value = "task-1780103" [ 1811.124126] env[63379]: _type = "Task" [ 1811.124126] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1811.133118] env[63379]: DEBUG oslo_vmware.api [None req-47fe0363-4c42-4dbd-9feb-fd3131e4b349 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780103, 'name': ReconfigVM_Task} progress is 5%. 
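The _attach_volume_vmdk entry above logs the full connection_info for volume e897c929-908c-41dd-b7b3-54172d033a9c. A short sketch of pulling out the fields the subsequent ReconfigVM_Task works with; the dict literal is a reduced copy of the one in the log, and describe_vmdk_attachment is a made-up helper for illustration.

# Reduced copy of the connection_info shown in the log.
connection_info = {
    "driver_volume_type": "vmdk",
    "data": {
        "volume": "vm-369475",
        "volume_id": "e897c929-908c-41dd-b7b3-54172d033a9c",
        "name": "volume-e897c929-908c-41dd-b7b3-54172d033a9c",
        "access_mode": "rw",
        "encrypted": False,
    },
}

def describe_vmdk_attachment(info):
    # Made-up helper: collect the fields the attach/reconfigure step needs.
    data = info["data"]
    return {
        "backing_ref": data["volume"],   # managed object id of the volume backing ('vm-369475' in the log)
        "disk_name": data["name"],
        "volume_id": data["volume_id"],
        "read_only": data.get("access_mode") == "ro",
    }

print(describe_vmdk_attachment(connection_info))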
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.208921] env[63379]: DEBUG oslo_vmware.api [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780101, 'name': Rename_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.274663] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780102, 'name': CreateVM_Task, 'duration_secs': 0.434954} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1811.274915] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1811.275534] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1811.275711] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1811.276061] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1811.276327] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-938c44a1-1924-4190-9908-f5768fb4a33f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.280731] env[63379]: DEBUG oslo_vmware.api [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1811.280731] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]521008ce-04b3-f76f-c944-8310072c5c77" [ 1811.280731] env[63379]: _type = "Task" [ 1811.280731] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1811.288016] env[63379]: DEBUG oslo_vmware.api [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]521008ce-04b3-f76f-c944-8310072c5c77, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.393022] env[63379]: DEBUG oslo_concurrency.lockutils [None req-aec56a88-5904-42bd-8346-e7a6842f6058 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1811.514572] env[63379]: DEBUG oslo_vmware.api [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Task: {'id': task-1780100, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.729955} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1811.514844] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] f99bad46-931d-497a-8586-b140309b0b45/f99bad46-931d-497a-8586-b140309b0b45.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1811.515118] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: f99bad46-931d-497a-8586-b140309b0b45] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1811.515460] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-483e35e6-7d9e-47c5-92b8-90f8c8b3f2f4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.524303] env[63379]: DEBUG oslo_vmware.api [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Waiting for the task: (returnval){ [ 1811.524303] env[63379]: value = "task-1780104" [ 1811.524303] env[63379]: _type = "Task" [ 1811.524303] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1811.532494] env[63379]: DEBUG oslo_vmware.api [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Task: {'id': task-1780104, 'name': ExtendVirtualDisk_Task} progress is 0%. 
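Several entries above move vmdk files by datastore path, e.g. [datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk. A small sketch of composing and splitting that "[datastore] relative/path" form as it appears in the log; it mirrors the string format only, not Nova's ds_util helpers.

def make_ds_path(datastore, *parts):
    # Build a '[datastore] rel/ative/path' string like the ones in the log.
    return "[%s] %s" % (datastore, "/".join(parts))

def split_ds_path(ds_path):
    # Split '[datastore1] dir/file.vmdk' into (datastore, relative path).
    if not ds_path.startswith("["):
        raise ValueError("not a datastore path: %r" % ds_path)
    datastore, _, rel = ds_path[1:].partition("] ")
    return datastore, rel

image_id = "d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48"
cached = make_ds_path("datastore1", "devstack-image-cache_base", image_id, image_id + ".vmdk")
assert split_ds_path(cached) == (
    "datastore1",
    "devstack-image-cache_base/%s/%s.vmdk" % (image_id, image_id),
)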
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.567849] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ace19db8-0e47-4256-aef7-21239013643b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.575437] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b29c385-87f5-4985-82a2-f3209aadb33c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.607124] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8245bd78-f7f2-4dc7-b247-c32b2f717ecf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.614533] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2577afc-e4f1-448b-8a69-eea1a88cdfd2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.620224] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: f983d089-7cfc-46a5-8f8d-f49f67aef1da] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1811.636008] env[63379]: DEBUG nova.compute.provider_tree [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1811.642418] env[63379]: DEBUG oslo_vmware.api [None req-47fe0363-4c42-4dbd-9feb-fd3131e4b349 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780103, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.677890] env[63379]: DEBUG nova.network.neutron [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Updating instance_info_cache with network_info: [{"id": "414f9be4-c922-4750-817c-32bc1d4ac6c4", "address": "fa:16:3e:a5:a9:b3", "network": {"id": "a2c9b802-041e-4679-bfb1-118fd9cd10f3", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-986609966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f28f4532d464e6eb90ab75799990c85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap414f9be4-c9", "ovs_interfaceid": "414f9be4-c922-4750-817c-32bc1d4ac6c4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1811.708297] env[63379]: DEBUG oslo_vmware.api [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780101, 'name': Rename_Task, 'duration_secs': 0.619242} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1811.708551] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1811.708785] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fae9e4cc-6903-4ae7-8fd8-36780e3a7ff2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.714261] env[63379]: DEBUG oslo_vmware.api [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1811.714261] env[63379]: value = "task-1780105" [ 1811.714261] env[63379]: _type = "Task" [ 1811.714261] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1811.721622] env[63379]: DEBUG oslo_vmware.api [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780105, 'name': PowerOnVM_Task} progress is 0%. 
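The instance_info_cache update above embeds the complete network_info for instance 9faef8ba-2263-4af8-ba5b-13a17b4275b6. A sketch of walking that structure to recover a per-VIF summary (port id, MAC, fixed IPs, devname); the literal below is trimmed from the log entry and the traversal simply follows the nesting shown there.

# Trimmed from the network_info in the log.
network_info = [{
    "id": "414f9be4-c922-4750-817c-32bc1d4ac6c4",
    "address": "fa:16:3e:a5:a9:b3",
    "network": {
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "gateway": {"address": "192.168.128.1"},
            "ips": [{"address": "192.168.128.3", "type": "fixed"}],
        }],
    },
    "devname": "tap414f9be4-c9",
    "ovs_interfaceid": "414f9be4-c922-4750-817c-32bc1d4ac6c4",
}]

def summarize_vifs(nw_info):
    # Collect (port id, MAC, fixed IPs, devname) for each VIF entry.
    out = []
    for vif in nw_info:
        ips = [ip["address"]
               for subnet in vif["network"]["subnets"]
               for ip in subnet["ips"]]
        out.append((vif["id"], vif["address"], ips, vif["devname"]))
    return out

print(summarize_vifs(network_info))
# -> [('414f9be4-c922-4750-817c-32bc1d4ac6c4', 'fa:16:3e:a5:a9:b3', ['192.168.128.3'], 'tap414f9be4-c9')]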
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.791310] env[63379]: DEBUG oslo_vmware.api [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]521008ce-04b3-f76f-c944-8310072c5c77, 'name': SearchDatastore_Task, 'duration_secs': 0.010268} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1811.791639] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1811.791878] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1811.792159] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1811.792317] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1811.792498] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1811.792757] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5d669884-7724-46cf-b3c4-18f3899e9de4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.801244] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1811.801428] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1811.802135] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df4bb025-5a95-41c5-9bc7-1ed4af89497c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.806950] env[63379]: DEBUG oslo_vmware.api [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1811.806950] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]529d40e2-6f5e-df0f-a13a-481cd451fe67" [ 1811.806950] env[63379]: _type = "Task" [ 1811.806950] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1811.814355] env[63379]: DEBUG oslo_vmware.api [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]529d40e2-6f5e-df0f-a13a-481cd451fe67, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.033661] env[63379]: DEBUG oslo_vmware.api [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Task: {'id': task-1780104, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.123785] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 6e022c9a-642b-4d96-8195-e56809bbd7b9] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1812.134516] env[63379]: DEBUG oslo_vmware.api [None req-47fe0363-4c42-4dbd-9feb-fd3131e4b349 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780103, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.138449] env[63379]: DEBUG nova.scheduler.client.report [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1812.181114] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Releasing lock "refresh_cache-9faef8ba-2263-4af8-ba5b-13a17b4275b6" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1812.224096] env[63379]: DEBUG oslo_vmware.api [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780105, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.317832] env[63379]: DEBUG oslo_vmware.api [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]529d40e2-6f5e-df0f-a13a-481cd451fe67, 'name': SearchDatastore_Task, 'duration_secs': 0.008078} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1812.318665] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e4773a7-cdc0-43de-bbce-6b9951fb010e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.323623] env[63379]: DEBUG oslo_vmware.api [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1812.323623] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52637b80-bc05-e51c-9b6e-362cd1dd1005" [ 1812.323623] env[63379]: _type = "Task" [ 1812.323623] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1812.330866] env[63379]: DEBUG oslo_vmware.api [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52637b80-bc05-e51c-9b6e-362cd1dd1005, 'name': SearchDatastore_Task} progress is 0%. 
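The report-client entry above repeats the unchanged inventory for provider cf478c89-515f-4372-b90f-4868ab56e978. What those numbers mean to the scheduler is effective capacity = (total - reserved) * allocation_ratio per resource class; the sketch below just reproduces that arithmetic for the dict in the log.

inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

def effective_capacity(inv):
    # (total - reserved) * allocation_ratio for each resource class.
    return {rc: (v["total"] - v["reserved"]) * v["allocation_ratio"]
            for rc, v in inv.items()}

print(effective_capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}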
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.536557] env[63379]: DEBUG oslo_vmware.api [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Task: {'id': task-1780104, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.870929} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1812.536871] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: f99bad46-931d-497a-8586-b140309b0b45] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1812.537678] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f2a3620-7069-4b4a-bdcb-d16ede74367d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.559795] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: f99bad46-931d-497a-8586-b140309b0b45] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] f99bad46-931d-497a-8586-b140309b0b45/f99bad46-931d-497a-8586-b140309b0b45.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1812.560089] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c994f446-e486-4989-a25f-6deb4bd2c02d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.580092] env[63379]: DEBUG oslo_vmware.api [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Waiting for the task: (returnval){ [ 1812.580092] env[63379]: value = "task-1780106" [ 1812.580092] env[63379]: _type = "Task" [ 1812.580092] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1812.588143] env[63379]: DEBUG oslo_vmware.api [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Task: {'id': task-1780106, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.630188] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: f082cdd7-228e-4100-b301-5af6daea9b36] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1812.637492] env[63379]: DEBUG oslo_vmware.api [None req-47fe0363-4c42-4dbd-9feb-fd3131e4b349 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780103, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.644168] env[63379]: DEBUG oslo_concurrency.lockutils [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.279s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1812.644168] env[63379]: DEBUG nova.compute.manager [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1812.646675] env[63379]: DEBUG oslo_concurrency.lockutils [None req-aec56a88-5904-42bd-8346-e7a6842f6058 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.254s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1812.646871] env[63379]: DEBUG nova.objects.instance [None req-aec56a88-5904-42bd-8346-e7a6842f6058 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Lazy-loading 'resources' on Instance uuid 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1812.726141] env[63379]: DEBUG oslo_vmware.api [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780105, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.838231] env[63379]: DEBUG oslo_vmware.api [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52637b80-bc05-e51c-9b6e-362cd1dd1005, 'name': SearchDatastore_Task, 'duration_secs': 0.023062} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1812.838618] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1812.839016] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] a7cce485-7476-4ea1-b127-68d879e164cd/a7cce485-7476-4ea1-b127-68d879e164cd.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1812.839406] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7bc48843-42b3-4f2e-b683-b378462e9283 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.847537] env[63379]: DEBUG oslo_vmware.api [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1812.847537] env[63379]: value = "task-1780107" [ 1812.847537] env[63379]: _type = "Task" [ 1812.847537] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1812.855780] env[63379]: DEBUG oslo_vmware.api [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780107, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.093485] env[63379]: DEBUG oslo_vmware.api [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Task: {'id': task-1780106, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.133811] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: f10fe64d-a09e-488a-b609-3e38922cf2e0] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1813.141614] env[63379]: DEBUG oslo_vmware.api [None req-47fe0363-4c42-4dbd-9feb-fd3131e4b349 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780103, 'name': ReconfigVM_Task, 'duration_secs': 1.906556} completed successfully. 
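The Acquiring/Acquired/Releasing traffic around the cached image vmdk above is the usual oslo.concurrency pattern for serialising work on the shared image cache. A hedged sketch of that usage: lockutils.lock is the real oslo.concurrency context manager, but the lock name and the guarded body here are illustrative only.

from oslo_concurrency import lockutils

# Lock name copied from the log.
CACHE_VMDK = ("[datastore1] devstack-image-cache_base/"
              "d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/"
              "d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk")

def with_image_cache_lock(copy_fn):
    # The Acquiring/Acquired/Releasing DEBUG lines in the log are emitted by
    # lockutils itself around a block like this one.
    with lockutils.lock(CACHE_VMDK):
        return copy_fn()

print(with_image_cache_lock(lambda: "copied"))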
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1813.142157] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-47fe0363-4c42-4dbd-9feb-fd3131e4b349 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Reconfigured VM instance instance-0000004f to attach disk [datastore1] volume-e897c929-908c-41dd-b7b3-54172d033a9c/volume-e897c929-908c-41dd-b7b3-54172d033a9c.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1813.150741] env[63379]: DEBUG nova.compute.utils [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1813.156630] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1ca113fe-22b3-484e-a760-3862644dcb72 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.170583] env[63379]: DEBUG nova.compute.manager [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1813.173820] env[63379]: DEBUG nova.compute.manager [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1813.173820] env[63379]: DEBUG nova.network.neutron [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1813.183869] env[63379]: DEBUG oslo_vmware.api [None req-47fe0363-4c42-4dbd-9feb-fd3131e4b349 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 1813.183869] env[63379]: value = "task-1780108" [ 1813.183869] env[63379]: _type = "Task" [ 1813.183869] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1813.198820] env[63379]: DEBUG oslo_vmware.api [None req-47fe0363-4c42-4dbd-9feb-fd3131e4b349 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780108, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.223285] env[63379]: DEBUG nova.policy [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '756ff556130a4855b461899fece1e1fa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a3363a90de2d4d5988ddd03974c10d0a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1813.230494] env[63379]: DEBUG oslo_vmware.api [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780105, 'name': PowerOnVM_Task, 'duration_secs': 1.297063} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1813.230849] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1813.231356] env[63379]: INFO nova.compute.manager [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Took 11.98 seconds to spawn the instance on the hypervisor. [ 1813.231356] env[63379]: DEBUG nova.compute.manager [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1813.232129] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2ef6319-6b51-410e-abc1-215d7c040da3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.358594] env[63379]: DEBUG oslo_vmware.api [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780107, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.404767] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20bd047a-56fb-49db-9303-8cb99a8b984b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.413657] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10d56a4f-ecaa-4602-911c-ef9bdccf8438 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.447704] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c0eb11f-2ae8-43fb-abb3-9709f4a55cc2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.456637] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66d2ce8f-4d4f-4f19-a2df-1260c0811955 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.471431] env[63379]: DEBUG nova.compute.provider_tree [None req-aec56a88-5904-42bd-8346-e7a6842f6058 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1813.549991] env[63379]: DEBUG nova.network.neutron [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Successfully created port: f7fd4937-49e3-4d89-8fed-cc6c052fc1c0 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1813.592124] env[63379]: DEBUG oslo_vmware.api [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Task: {'id': task-1780106, 'name': ReconfigVM_Task, 'duration_secs': 0.720499} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1813.592517] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: f99bad46-931d-497a-8586-b140309b0b45] Reconfigured VM instance instance-00000060 to attach disk [datastore1] f99bad46-931d-497a-8586-b140309b0b45/f99bad46-931d-497a-8586-b140309b0b45.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1813.593182] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-51a628db-c888-447e-acb0-e0836cd4d76d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.600569] env[63379]: DEBUG oslo_vmware.api [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Waiting for the task: (returnval){ [ 1813.600569] env[63379]: value = "task-1780109" [ 1813.600569] env[63379]: _type = "Task" [ 1813.600569] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1813.611095] env[63379]: DEBUG oslo_vmware.api [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Task: {'id': task-1780109, 'name': Rename_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.642850] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 158fe346-93f5-422b-877a-8423547da58f] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1813.692850] env[63379]: DEBUG oslo_vmware.api [None req-47fe0363-4c42-4dbd-9feb-fd3131e4b349 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780108, 'name': ReconfigVM_Task, 'duration_secs': 0.217933} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1813.695671] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-47fe0363-4c42-4dbd-9feb-fd3131e4b349 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369475', 'volume_id': 'e897c929-908c-41dd-b7b3-54172d033a9c', 'name': 'volume-e897c929-908c-41dd-b7b3-54172d033a9c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '19941838-d6b0-4fb8-9d06-f4a1b80ba428', 'attached_at': '', 'detached_at': '', 'volume_id': 'e897c929-908c-41dd-b7b3-54172d033a9c', 'serial': 'e897c929-908c-41dd-b7b3-54172d033a9c'} {{(pid=63379) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1813.697768] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e410153-90da-4209-af66-ebe13c778fc5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.718037] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Updating instance '9faef8ba-2263-4af8-ba5b-13a17b4275b6' progress to 0 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1813.755267] env[63379]: INFO nova.compute.manager [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Took 19.72 seconds to build instance. [ 1813.858153] env[63379]: DEBUG oslo_vmware.api [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780107, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.797923} completed successfully. 
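Taken together, the tasks above for instances such as 62494fa1-5990-490d-92ae-00607d7ebba1 and f99bad46-931d-497a-8586-b140309b0b45 trace the spawn path: copy the cached image vmdk, extend the root disk to 1048576 KB (matching the 1 GiB root_gb of m1.nano), reconfigure the VM to attach it, rename, then power on. A compressed, purely illustrative outline of that sequence; session and its helper methods are hypothetical placeholders, not Nova's vmops code or the oslo.vmware API.

def spawn_from_cached_image(session, instance_uuid, image_id, datastore="datastore1"):
    # Purely illustrative outline of the task sequence in the log; 'session'
    # and its helper methods are hypothetical placeholders, not a real API.
    cache = "[%s] devstack-image-cache_base/%s/%s.vmdk" % (datastore, image_id, image_id)
    root = "[%s] %s/%s.vmdk" % (datastore, instance_uuid, instance_uuid)

    session.wait_for_task(session.copy_virtual_disk(cache, root))          # CopyVirtualDisk_Task
    session.wait_for_task(session.extend_virtual_disk(root, 1048576))      # ExtendVirtualDisk_Task, size in KB
    session.wait_for_task(session.attach_disk_to_vm(instance_uuid, root))  # ReconfigVM_Task
    session.wait_for_task(session.rename_vm(instance_uuid))                # Rename_Task
    session.wait_for_task(session.power_on_vm(instance_uuid))              # PowerOnVM_Task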
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1813.858439] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] a7cce485-7476-4ea1-b127-68d879e164cd/a7cce485-7476-4ea1-b127-68d879e164cd.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1813.858660] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1813.858917] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bba54ea4-fe79-46e9-9a51-50b3bcb97770 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.864958] env[63379]: DEBUG oslo_vmware.api [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1813.864958] env[63379]: value = "task-1780110" [ 1813.864958] env[63379]: _type = "Task" [ 1813.864958] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1813.872399] env[63379]: DEBUG oslo_vmware.api [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780110, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.975375] env[63379]: DEBUG nova.scheduler.client.report [None req-aec56a88-5904-42bd-8346-e7a6842f6058 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1814.112998] env[63379]: DEBUG oslo_vmware.api [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Task: {'id': task-1780109, 'name': Rename_Task, 'duration_secs': 0.145403} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.113689] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: f99bad46-931d-497a-8586-b140309b0b45] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1814.113689] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d9572535-2606-404a-81b4-2efdbffbef28 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.119929] env[63379]: DEBUG oslo_vmware.api [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Waiting for the task: (returnval){ [ 1814.119929] env[63379]: value = "task-1780111" [ 1814.119929] env[63379]: _type = "Task" [ 1814.119929] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.128488] env[63379]: DEBUG oslo_vmware.api [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Task: {'id': task-1780111, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.146523] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 5aa36799-251b-4933-8ccd-8125995b1f8b] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1814.183547] env[63379]: DEBUG nova.compute.manager [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1814.225187] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1814.227557] env[63379]: DEBUG nova.virt.hardware [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1814.227774] env[63379]: DEBUG nova.virt.hardware [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1814.227955] env[63379]: DEBUG nova.virt.hardware [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1814.228139] env[63379]: DEBUG nova.virt.hardware [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1814.228286] env[63379]: DEBUG nova.virt.hardware [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1814.228434] env[63379]: DEBUG nova.virt.hardware [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1814.228638] env[63379]: DEBUG nova.virt.hardware [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 
1814.228794] env[63379]: DEBUG nova.virt.hardware [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1814.228976] env[63379]: DEBUG nova.virt.hardware [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1814.229185] env[63379]: DEBUG nova.virt.hardware [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1814.229365] env[63379]: DEBUG nova.virt.hardware [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1814.229642] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-72090626-1243-4815-8ae2-509a2128df66 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.231948] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-338b7873-a1f1-4c12-b88b-f56feab410aa {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.241721] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-279f0746-54a3-40d6-ae62-a12fa0432ac5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.245805] env[63379]: DEBUG oslo_vmware.api [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1814.245805] env[63379]: value = "task-1780112" [ 1814.245805] env[63379]: _type = "Task" [ 1814.245805] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.260908] env[63379]: DEBUG oslo_concurrency.lockutils [None req-446ba734-64ed-42a6-88a4-bf42ae611328 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "62494fa1-5990-490d-92ae-00607d7ebba1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.237s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1814.269977] env[63379]: DEBUG oslo_vmware.api [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780112, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.377392] env[63379]: DEBUG oslo_vmware.api [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780110, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075901} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.377672] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1814.378458] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75d5a72c-9872-469d-a8ac-7f43a5918451 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.401841] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] a7cce485-7476-4ea1-b127-68d879e164cd/a7cce485-7476-4ea1-b127-68d879e164cd.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1814.402176] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-51b6ff1f-6e24-40ab-8f7e-8889e734a8ce {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.422115] env[63379]: DEBUG oslo_vmware.api [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1814.422115] env[63379]: value = "task-1780113" [ 1814.422115] env[63379]: _type = "Task" [ 1814.422115] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.431402] env[63379]: DEBUG oslo_vmware.api [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780113, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.481050] env[63379]: DEBUG oslo_concurrency.lockutils [None req-aec56a88-5904-42bd-8346-e7a6842f6058 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.834s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1814.508214] env[63379]: INFO nova.scheduler.client.report [None req-aec56a88-5904-42bd-8346-e7a6842f6058 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Deleted allocations for instance 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9 [ 1814.630227] env[63379]: DEBUG oslo_vmware.api [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Task: {'id': task-1780111, 'name': PowerOnVM_Task, 'duration_secs': 0.496686} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.630607] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: f99bad46-931d-497a-8586-b140309b0b45] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1814.630846] env[63379]: INFO nova.compute.manager [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: f99bad46-931d-497a-8586-b140309b0b45] Took 9.35 seconds to spawn the instance on the hypervisor. 
[ 1814.631044] env[63379]: DEBUG nova.compute.manager [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: f99bad46-931d-497a-8586-b140309b0b45] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1814.631821] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b196b821-c95a-4bc7-a238-491d9fa22e73 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.649652] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 650d4709-3cbc-4b9a-b165-66fa0af97c4d] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1814.698628] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de52cb3c-2973-4795-a6d2-755243103999 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.705778] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0fcff65c-74b8-4bf7-9782-d4ee721399d6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Suspending the VM {{(pid=63379) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1814.706093] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-2aad9438-87f6-4169-83f4-a29f8512e8b4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.711585] env[63379]: DEBUG oslo_vmware.api [None req-0fcff65c-74b8-4bf7-9782-d4ee721399d6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1814.711585] env[63379]: value = "task-1780114" [ 1814.711585] env[63379]: _type = "Task" [ 1814.711585] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.718882] env[63379]: DEBUG oslo_vmware.api [None req-0fcff65c-74b8-4bf7-9782-d4ee721399d6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780114, 'name': SuspendVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.753352] env[63379]: DEBUG nova.objects.instance [None req-47fe0363-4c42-4dbd-9feb-fd3131e4b349 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lazy-loading 'flavor' on Instance uuid 19941838-d6b0-4fb8-9d06-f4a1b80ba428 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1814.760765] env[63379]: DEBUG oslo_vmware.api [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780112, 'name': PowerOffVM_Task, 'duration_secs': 0.318917} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.761299] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1814.761646] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Updating instance '9faef8ba-2263-4af8-ba5b-13a17b4275b6' progress to 17 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1814.934208] env[63379]: DEBUG oslo_vmware.api [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780113, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.019933] env[63379]: DEBUG oslo_concurrency.lockutils [None req-aec56a88-5904-42bd-8346-e7a6842f6058 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Lock "3b662a31-76b9-4ac8-a6bd-bc4983f7fec9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.286s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1815.150516] env[63379]: INFO nova.compute.manager [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: f99bad46-931d-497a-8586-b140309b0b45] Took 15.43 seconds to build instance. [ 1815.155496] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 318355e9-b4cc-4645-ac51-b583d14e1134] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1815.223255] env[63379]: DEBUG oslo_vmware.api [None req-0fcff65c-74b8-4bf7-9782-d4ee721399d6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780114, 'name': SuspendVM_Task} progress is 62%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.264835] env[63379]: DEBUG oslo_concurrency.lockutils [None req-47fe0363-4c42-4dbd-9feb-fd3131e4b349 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "19941838-d6b0-4fb8-9d06-f4a1b80ba428" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.821s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1815.270483] env[63379]: DEBUG nova.virt.hardware [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1815.270740] env[63379]: DEBUG nova.virt.hardware [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1815.270902] env[63379]: DEBUG nova.virt.hardware [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1815.271107] env[63379]: DEBUG nova.virt.hardware [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1815.271497] env[63379]: DEBUG nova.virt.hardware [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1815.271497] env[63379]: DEBUG nova.virt.hardware [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1815.271723] env[63379]: DEBUG nova.virt.hardware [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1815.271772] env[63379]: DEBUG 
nova.virt.hardware [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1815.272017] env[63379]: DEBUG nova.virt.hardware [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1815.272234] env[63379]: DEBUG nova.virt.hardware [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1815.272423] env[63379]: DEBUG nova.virt.hardware [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1815.281744] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a3597309-6d07-4452-a036-4947c1f5a3b0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.294380] env[63379]: DEBUG nova.compute.manager [req-f79b4c0a-dc1c-4058-a8e9-a9af01edc8f6 req-21ddf786-944a-4654-8335-8a75d09e39c1 service nova] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Received event network-vif-plugged-f7fd4937-49e3-4d89-8fed-cc6c052fc1c0 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1815.294724] env[63379]: DEBUG oslo_concurrency.lockutils [req-f79b4c0a-dc1c-4058-a8e9-a9af01edc8f6 req-21ddf786-944a-4654-8335-8a75d09e39c1 service nova] Acquiring lock "2be6bdea-416e-4912-8930-3c4e4f194f99-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1815.295039] env[63379]: DEBUG oslo_concurrency.lockutils [req-f79b4c0a-dc1c-4058-a8e9-a9af01edc8f6 req-21ddf786-944a-4654-8335-8a75d09e39c1 service nova] Lock "2be6bdea-416e-4912-8930-3c4e4f194f99-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1815.295267] env[63379]: DEBUG oslo_concurrency.lockutils [req-f79b4c0a-dc1c-4058-a8e9-a9af01edc8f6 req-21ddf786-944a-4654-8335-8a75d09e39c1 service nova] Lock "2be6bdea-416e-4912-8930-3c4e4f194f99-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1815.295484] env[63379]: DEBUG nova.compute.manager [req-f79b4c0a-dc1c-4058-a8e9-a9af01edc8f6 req-21ddf786-944a-4654-8335-8a75d09e39c1 service nova] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] No waiting events found dispatching network-vif-plugged-f7fd4937-49e3-4d89-8fed-cc6c052fc1c0 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 
1815.295705] env[63379]: WARNING nova.compute.manager [req-f79b4c0a-dc1c-4058-a8e9-a9af01edc8f6 req-21ddf786-944a-4654-8335-8a75d09e39c1 service nova] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Received unexpected event network-vif-plugged-f7fd4937-49e3-4d89-8fed-cc6c052fc1c0 for instance with vm_state building and task_state spawning. [ 1816.020359] env[63379]: DEBUG nova.network.neutron [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Successfully updated port: f7fd4937-49e3-4d89-8fed-cc6c052fc1c0 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1816.023215] env[63379]: DEBUG oslo_concurrency.lockutils [None req-566715e3-aa02-4794-a0f8-4ea5d77c93f2 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Lock "f99bad46-931d-497a-8586-b140309b0b45" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.309s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1816.023547] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: c439fe86-fc43-4c05-a4b7-3634a043269a] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1816.025255] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8136c620-cbc0-43ec-bc67-341a4664a0d5 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquiring lock "1d2de9da-9dfe-42d2-b206-bb5139b1970b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1816.025487] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8136c620-cbc0-43ec-bc67-341a4664a0d5 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Lock "1d2de9da-9dfe-42d2-b206-bb5139b1970b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1816.025680] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8136c620-cbc0-43ec-bc67-341a4664a0d5 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquiring lock "1d2de9da-9dfe-42d2-b206-bb5139b1970b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1816.025869] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8136c620-cbc0-43ec-bc67-341a4664a0d5 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Lock "1d2de9da-9dfe-42d2-b206-bb5139b1970b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1816.026044] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8136c620-cbc0-43ec-bc67-341a4664a0d5 tempest-ServerRescueNegativeTestJSON-240449381 
tempest-ServerRescueNegativeTestJSON-240449381-project-member] Lock "1d2de9da-9dfe-42d2-b206-bb5139b1970b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1816.029577] env[63379]: DEBUG oslo_vmware.api [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1816.029577] env[63379]: value = "task-1780115" [ 1816.029577] env[63379]: _type = "Task" [ 1816.029577] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.030300] env[63379]: INFO nova.compute.manager [None req-8136c620-cbc0-43ec-bc67-341a4664a0d5 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Terminating instance [ 1816.038616] env[63379]: DEBUG nova.compute.manager [None req-8136c620-cbc0-43ec-bc67-341a4664a0d5 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1816.038824] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8136c620-cbc0-43ec-bc67-341a4664a0d5 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1816.042501] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce1a4994-dde9-4d2d-ae57-ca368fae6d84 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.050799] env[63379]: DEBUG oslo_vmware.api [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780113, 'name': ReconfigVM_Task, 'duration_secs': 0.518709} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1816.057913] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Reconfigured VM instance instance-0000004c to attach disk [datastore1] a7cce485-7476-4ea1-b127-68d879e164cd/a7cce485-7476-4ea1-b127-68d879e164cd.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1816.058806] env[63379]: DEBUG oslo_vmware.api [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780115, 'name': ReconfigVM_Task, 'duration_secs': 0.230397} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1816.059309] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-8136c620-cbc0-43ec-bc67-341a4664a0d5 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1816.059560] env[63379]: DEBUG oslo_vmware.api [None req-0fcff65c-74b8-4bf7-9782-d4ee721399d6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780114, 'name': SuspendVM_Task, 'duration_secs': 0.813345} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1816.059746] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-229dff0f-8be7-43cd-8402-f83fc78febd4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.061261] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Updating instance '9faef8ba-2263-4af8-ba5b-13a17b4275b6' progress to 33 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1816.064534] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-177c1151-5fd6-4bc6-b3f8-60fdbd35c2af {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.065805] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0fcff65c-74b8-4bf7-9782-d4ee721399d6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Suspended the VM {{(pid=63379) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1816.065998] env[63379]: DEBUG nova.compute.manager [None req-0fcff65c-74b8-4bf7-9782-d4ee721399d6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1816.067126] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cedd0dbf-83f7-4b99-b33d-e66e0858034d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.077420] env[63379]: DEBUG oslo_vmware.api [None req-8136c620-cbc0-43ec-bc67-341a4664a0d5 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1816.077420] env[63379]: value = "task-1780117" [ 1816.077420] env[63379]: _type = "Task" [ 1816.077420] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.077756] env[63379]: DEBUG oslo_vmware.api [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1816.077756] env[63379]: value = "task-1780116" [ 1816.077756] env[63379]: _type = "Task" [ 1816.077756] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.088384] env[63379]: DEBUG oslo_vmware.api [None req-8136c620-cbc0-43ec-bc67-341a4664a0d5 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1780117, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.091441] env[63379]: DEBUG oslo_vmware.api [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780116, 'name': Rename_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.258762] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquiring lock "815d0af5-e9a8-4475-9414-42715ea32d6a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1816.259027] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "815d0af5-e9a8-4475-9414-42715ea32d6a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1816.531429] env[63379]: DEBUG oslo_concurrency.lockutils [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "refresh_cache-2be6bdea-416e-4912-8930-3c4e4f194f99" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1816.531627] env[63379]: DEBUG oslo_concurrency.lockutils [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquired lock "refresh_cache-2be6bdea-416e-4912-8930-3c4e4f194f99" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1816.531796] env[63379]: DEBUG nova.network.neutron [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1816.533299] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: e838f54f-99f2-4f39-a9d2-725be8a5b3ce] 
Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1816.534940] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4e91aa5d-a0bf-44c6-b45d-a7e8951abf92 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Acquiring lock "f99bad46-931d-497a-8586-b140309b0b45" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1816.535178] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4e91aa5d-a0bf-44c6-b45d-a7e8951abf92 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Lock "f99bad46-931d-497a-8586-b140309b0b45" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1816.535381] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4e91aa5d-a0bf-44c6-b45d-a7e8951abf92 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Acquiring lock "f99bad46-931d-497a-8586-b140309b0b45-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1816.535574] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4e91aa5d-a0bf-44c6-b45d-a7e8951abf92 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Lock "f99bad46-931d-497a-8586-b140309b0b45-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1816.535776] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4e91aa5d-a0bf-44c6-b45d-a7e8951abf92 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Lock "f99bad46-931d-497a-8586-b140309b0b45-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1816.537723] env[63379]: INFO nova.compute.manager [None req-4e91aa5d-a0bf-44c6-b45d-a7e8951abf92 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: f99bad46-931d-497a-8586-b140309b0b45] Terminating instance [ 1816.539366] env[63379]: DEBUG nova.compute.manager [None req-4e91aa5d-a0bf-44c6-b45d-a7e8951abf92 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: f99bad46-931d-497a-8586-b140309b0b45] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1816.539473] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4e91aa5d-a0bf-44c6-b45d-a7e8951abf92 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: f99bad46-931d-497a-8586-b140309b0b45] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1816.540346] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-585baeb8-9c18-4760-80a1-6ec8ed43b885 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.549867] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e91aa5d-a0bf-44c6-b45d-a7e8951abf92 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: f99bad46-931d-497a-8586-b140309b0b45] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1816.550192] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3d6bdd88-a926-47c9-8139-8c29c9045a1e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.556204] env[63379]: DEBUG oslo_vmware.api [None req-4e91aa5d-a0bf-44c6-b45d-a7e8951abf92 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Waiting for the task: (returnval){ [ 1816.556204] env[63379]: value = "task-1780118" [ 1816.556204] env[63379]: _type = "Task" [ 1816.556204] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.565716] env[63379]: DEBUG oslo_vmware.api [None req-4e91aa5d-a0bf-44c6-b45d-a7e8951abf92 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Task: {'id': task-1780118, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.568771] env[63379]: DEBUG nova.virt.hardware [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1816.569013] env[63379]: DEBUG nova.virt.hardware [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1816.569185] env[63379]: DEBUG nova.virt.hardware [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1816.569394] env[63379]: DEBUG nova.virt.hardware [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1816.569554] env[63379]: DEBUG nova.virt.hardware [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1816.569700] env[63379]: DEBUG nova.virt.hardware [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1816.569898] env[63379]: DEBUG nova.virt.hardware [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1816.570068] env[63379]: DEBUG nova.virt.hardware [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1816.570235] env[63379]: DEBUG nova.virt.hardware [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 
tempest-ServerDiskConfigTestJSON-1340181381-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1816.570395] env[63379]: DEBUG nova.virt.hardware [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1816.570566] env[63379]: DEBUG nova.virt.hardware [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1816.575745] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Reconfiguring VM instance instance-0000005e to detach disk 2000 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1816.576319] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a91d369c-10f3-48a1-aa09-e3ff9894b298 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.603162] env[63379]: DEBUG oslo_vmware.api [None req-8136c620-cbc0-43ec-bc67-341a4664a0d5 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1780117, 'name': PowerOffVM_Task, 'duration_secs': 0.196594} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1816.607299] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-8136c620-cbc0-43ec-bc67-341a4664a0d5 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1816.607475] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8136c620-cbc0-43ec-bc67-341a4664a0d5 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1816.607772] env[63379]: DEBUG oslo_vmware.api [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1816.607772] env[63379]: value = "task-1780119" [ 1816.607772] env[63379]: _type = "Task" [ 1816.607772] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.608236] env[63379]: DEBUG oslo_vmware.api [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780116, 'name': Rename_Task, 'duration_secs': 0.412835} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1816.608437] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a712aac7-b282-4392-a944-367be5493a19 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.609913] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1816.612724] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c671ee38-a2ca-4b13-9223-9a7675559cf4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.619582] env[63379]: DEBUG oslo_vmware.api [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780119, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.620756] env[63379]: DEBUG oslo_vmware.api [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1816.620756] env[63379]: value = "task-1780121" [ 1816.620756] env[63379]: _type = "Task" [ 1816.620756] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.629464] env[63379]: DEBUG oslo_vmware.api [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780121, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.712113] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8136c620-cbc0-43ec-bc67-341a4664a0d5 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1816.712356] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8136c620-cbc0-43ec-bc67-341a4664a0d5 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1816.712540] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-8136c620-cbc0-43ec-bc67-341a4664a0d5 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Deleting the datastore file [datastore1] 1d2de9da-9dfe-42d2-b206-bb5139b1970b {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1816.712882] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fd12c7df-1dd6-45fa-8fb1-7f60efc05133 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.719584] env[63379]: DEBUG oslo_vmware.api [None req-8136c620-cbc0-43ec-bc67-341a4664a0d5 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for the task: (returnval){ [ 1816.719584] env[63379]: value = "task-1780122" [ 1816.719584] env[63379]: _type = "Task" [ 1816.719584] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.727419] env[63379]: DEBUG oslo_vmware.api [None req-8136c620-cbc0-43ec-bc67-341a4664a0d5 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1780122, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.761843] env[63379]: DEBUG nova.compute.manager [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1817.040987] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 2f98800d-800f-4ad7-bd65-f12879f02ce5] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1817.067084] env[63379]: DEBUG oslo_vmware.api [None req-4e91aa5d-a0bf-44c6-b45d-a7e8951abf92 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Task: {'id': task-1780118, 'name': PowerOffVM_Task, 'duration_secs': 0.360939} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1817.068128] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e91aa5d-a0bf-44c6-b45d-a7e8951abf92 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: f99bad46-931d-497a-8586-b140309b0b45] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1817.068327] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4e91aa5d-a0bf-44c6-b45d-a7e8951abf92 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: f99bad46-931d-497a-8586-b140309b0b45] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1817.068577] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2587e4cb-6fdb-43e4-8b44-796c2ce66322 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.081553] env[63379]: DEBUG nova.network.neutron [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1817.119219] env[63379]: DEBUG oslo_vmware.api [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780119, 'name': ReconfigVM_Task, 'duration_secs': 0.167826} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1817.119501] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Reconfigured VM instance instance-0000005e to detach disk 2000 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1817.120318] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-945be3a0-e630-4e1e-bf39-84e3567db11b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.146387] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] 9faef8ba-2263-4af8-ba5b-13a17b4275b6/9faef8ba-2263-4af8-ba5b-13a17b4275b6.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1817.152045] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5cd1b823-7559-4822-93c8-5dd6c2032aac {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.164578] env[63379]: DEBUG oslo_vmware.api [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] 
Task: {'id': task-1780121, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.172948] env[63379]: DEBUG oslo_vmware.api [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1817.172948] env[63379]: value = "task-1780124" [ 1817.172948] env[63379]: _type = "Task" [ 1817.172948] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1817.179650] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4e91aa5d-a0bf-44c6-b45d-a7e8951abf92 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: f99bad46-931d-497a-8586-b140309b0b45] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1817.179871] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4e91aa5d-a0bf-44c6-b45d-a7e8951abf92 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: f99bad46-931d-497a-8586-b140309b0b45] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1817.180064] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e91aa5d-a0bf-44c6-b45d-a7e8951abf92 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Deleting the datastore file [datastore1] f99bad46-931d-497a-8586-b140309b0b45 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1817.183359] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-66de3d30-b667-4cee-8b2e-3e9ce2378e93 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.185167] env[63379]: DEBUG oslo_vmware.api [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780124, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.188966] env[63379]: DEBUG oslo_vmware.api [None req-4e91aa5d-a0bf-44c6-b45d-a7e8951abf92 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Waiting for the task: (returnval){ [ 1817.188966] env[63379]: value = "task-1780125" [ 1817.188966] env[63379]: _type = "Task" [ 1817.188966] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1817.196500] env[63379]: DEBUG oslo_vmware.api [None req-4e91aa5d-a0bf-44c6-b45d-a7e8951abf92 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Task: {'id': task-1780125, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.229943] env[63379]: DEBUG oslo_vmware.api [None req-8136c620-cbc0-43ec-bc67-341a4664a0d5 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Task: {'id': task-1780122, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.171767} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1817.230211] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-8136c620-cbc0-43ec-bc67-341a4664a0d5 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1817.230446] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8136c620-cbc0-43ec-bc67-341a4664a0d5 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1817.230646] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8136c620-cbc0-43ec-bc67-341a4664a0d5 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1817.230846] env[63379]: INFO nova.compute.manager [None req-8136c620-cbc0-43ec-bc67-341a4664a0d5 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1817.231114] env[63379]: DEBUG oslo.service.loopingcall [None req-8136c620-cbc0-43ec-bc67-341a4664a0d5 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1817.231348] env[63379]: DEBUG nova.compute.manager [-] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1817.231459] env[63379]: DEBUG nova.network.neutron [-] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1817.260250] env[63379]: DEBUG nova.network.neutron [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Updating instance_info_cache with network_info: [{"id": "f7fd4937-49e3-4d89-8fed-cc6c052fc1c0", "address": "fa:16:3e:7f:3c:a0", "network": {"id": "867cf8d8-4bba-4306-ad6d-632c9dc6863d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-777715300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a3363a90de2d4d5988ddd03974c10d0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "459b8c74-0aa6-42b6-996a-42b1c5d7e5c6", "external-id": "nsx-vlan-transportzone-467", "segmentation_id": 467, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7fd4937-49", "ovs_interfaceid": "f7fd4937-49e3-4d89-8fed-cc6c052fc1c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1817.290677] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1817.291031] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1817.293986] env[63379]: INFO nova.compute.claims [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1817.315771] env[63379]: DEBUG nova.compute.manager [req-836fa813-0781-40e3-ac1a-8f87a060e1df req-5a411d37-826f-4e10-9ca5-53411ce942af service nova] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Received event 
network-changed-f7fd4937-49e3-4d89-8fed-cc6c052fc1c0 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1817.316015] env[63379]: DEBUG nova.compute.manager [req-836fa813-0781-40e3-ac1a-8f87a060e1df req-5a411d37-826f-4e10-9ca5-53411ce942af service nova] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Refreshing instance network info cache due to event network-changed-f7fd4937-49e3-4d89-8fed-cc6c052fc1c0. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1817.316279] env[63379]: DEBUG oslo_concurrency.lockutils [req-836fa813-0781-40e3-ac1a-8f87a060e1df req-5a411d37-826f-4e10-9ca5-53411ce942af service nova] Acquiring lock "refresh_cache-2be6bdea-416e-4912-8930-3c4e4f194f99" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1817.404415] env[63379]: DEBUG oslo_concurrency.lockutils [None req-586ea9fa-2a62-463e-b4a3-668c2ecee318 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "62494fa1-5990-490d-92ae-00607d7ebba1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1817.404710] env[63379]: DEBUG oslo_concurrency.lockutils [None req-586ea9fa-2a62-463e-b4a3-668c2ecee318 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "62494fa1-5990-490d-92ae-00607d7ebba1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1817.404992] env[63379]: DEBUG oslo_concurrency.lockutils [None req-586ea9fa-2a62-463e-b4a3-668c2ecee318 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "62494fa1-5990-490d-92ae-00607d7ebba1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1817.405189] env[63379]: DEBUG oslo_concurrency.lockutils [None req-586ea9fa-2a62-463e-b4a3-668c2ecee318 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "62494fa1-5990-490d-92ae-00607d7ebba1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1817.405386] env[63379]: DEBUG oslo_concurrency.lockutils [None req-586ea9fa-2a62-463e-b4a3-668c2ecee318 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "62494fa1-5990-490d-92ae-00607d7ebba1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1817.408439] env[63379]: INFO nova.compute.manager [None req-586ea9fa-2a62-463e-b4a3-668c2ecee318 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Terminating instance [ 1817.410212] env[63379]: DEBUG nova.compute.manager [None req-586ea9fa-2a62-463e-b4a3-668c2ecee318 
tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1817.410434] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-586ea9fa-2a62-463e-b4a3-668c2ecee318 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1817.411395] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd910e88-d1c2-410d-98b5-6b6cafe34611 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.419672] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-586ea9fa-2a62-463e-b4a3-668c2ecee318 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1817.419951] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5918f096-9230-4b7f-8de2-01f3e6b7face {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.494634] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-586ea9fa-2a62-463e-b4a3-668c2ecee318 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1817.494882] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-586ea9fa-2a62-463e-b4a3-668c2ecee318 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1817.495089] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-586ea9fa-2a62-463e-b4a3-668c2ecee318 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Deleting the datastore file [datastore1] 62494fa1-5990-490d-92ae-00607d7ebba1 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1817.495368] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-30fee9a1-d9e0-4bac-8c69-47f73d72520f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.502338] env[63379]: DEBUG oslo_vmware.api [None req-586ea9fa-2a62-463e-b4a3-668c2ecee318 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1817.502338] env[63379]: value = "task-1780127" [ 1817.502338] env[63379]: _type = "Task" [ 1817.502338] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1817.509922] env[63379]: DEBUG oslo_vmware.api [None req-586ea9fa-2a62-463e-b4a3-668c2ecee318 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780127, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.545093] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 0324da80-b97c-4dc9-9083-199fbda60341] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1817.635201] env[63379]: DEBUG oslo_vmware.api [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780121, 'name': PowerOnVM_Task, 'duration_secs': 0.822769} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1817.635556] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1817.635851] env[63379]: DEBUG nova.compute.manager [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1817.637361] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19b144e8-725b-4994-a4fb-54a22beafe35 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.683370] env[63379]: DEBUG oslo_vmware.api [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780124, 'name': ReconfigVM_Task, 'duration_secs': 0.374048} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1817.683635] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Reconfigured VM instance instance-0000005e to attach disk [datastore1] 9faef8ba-2263-4af8-ba5b-13a17b4275b6/9faef8ba-2263-4af8-ba5b-13a17b4275b6.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1817.683907] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Updating instance '9faef8ba-2263-4af8-ba5b-13a17b4275b6' progress to 50 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1817.698067] env[63379]: DEBUG oslo_vmware.api [None req-4e91aa5d-a0bf-44c6-b45d-a7e8951abf92 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Task: {'id': task-1780125, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141211} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1817.698322] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e91aa5d-a0bf-44c6-b45d-a7e8951abf92 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1817.698578] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4e91aa5d-a0bf-44c6-b45d-a7e8951abf92 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: f99bad46-931d-497a-8586-b140309b0b45] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1817.698775] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4e91aa5d-a0bf-44c6-b45d-a7e8951abf92 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: f99bad46-931d-497a-8586-b140309b0b45] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1817.698949] env[63379]: INFO nova.compute.manager [None req-4e91aa5d-a0bf-44c6-b45d-a7e8951abf92 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] [instance: f99bad46-931d-497a-8586-b140309b0b45] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1817.699201] env[63379]: DEBUG oslo.service.loopingcall [None req-4e91aa5d-a0bf-44c6-b45d-a7e8951abf92 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1817.699391] env[63379]: DEBUG nova.compute.manager [-] [instance: f99bad46-931d-497a-8586-b140309b0b45] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1817.699487] env[63379]: DEBUG nova.network.neutron [-] [instance: f99bad46-931d-497a-8586-b140309b0b45] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1817.766052] env[63379]: DEBUG oslo_concurrency.lockutils [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Releasing lock "refresh_cache-2be6bdea-416e-4912-8930-3c4e4f194f99" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1817.766304] env[63379]: DEBUG nova.compute.manager [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Instance network_info: |[{"id": "f7fd4937-49e3-4d89-8fed-cc6c052fc1c0", "address": "fa:16:3e:7f:3c:a0", "network": {"id": "867cf8d8-4bba-4306-ad6d-632c9dc6863d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-777715300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a3363a90de2d4d5988ddd03974c10d0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "459b8c74-0aa6-42b6-996a-42b1c5d7e5c6", "external-id": "nsx-vlan-transportzone-467", "segmentation_id": 467, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7fd4937-49", "ovs_interfaceid": "f7fd4937-49e3-4d89-8fed-cc6c052fc1c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1817.766611] env[63379]: DEBUG oslo_concurrency.lockutils [req-836fa813-0781-40e3-ac1a-8f87a060e1df req-5a411d37-826f-4e10-9ca5-53411ce942af service nova] Acquired lock "refresh_cache-2be6bdea-416e-4912-8930-3c4e4f194f99" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1817.766790] env[63379]: DEBUG nova.network.neutron [req-836fa813-0781-40e3-ac1a-8f87a060e1df req-5a411d37-826f-4e10-9ca5-53411ce942af service nova] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Refreshing network info cache for port f7fd4937-49e3-4d89-8fed-cc6c052fc1c0 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1817.768439] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7f:3c:a0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'459b8c74-0aa6-42b6-996a-42b1c5d7e5c6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f7fd4937-49e3-4d89-8fed-cc6c052fc1c0', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1817.775821] env[63379]: DEBUG oslo.service.loopingcall [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1817.778807] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1817.778969] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f90a29dc-d86c-458d-8ec8-b70f2de85f4f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.800461] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1817.800461] env[63379]: value = "task-1780128" [ 1817.800461] env[63379]: _type = "Task" [ 1817.800461] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1817.808719] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780128, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.980138] env[63379]: DEBUG nova.network.neutron [-] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1818.013687] env[63379]: DEBUG oslo_vmware.api [None req-586ea9fa-2a62-463e-b4a3-668c2ecee318 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780127, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138335} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1818.013798] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-586ea9fa-2a62-463e-b4a3-668c2ecee318 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1818.013987] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-586ea9fa-2a62-463e-b4a3-668c2ecee318 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1818.014186] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-586ea9fa-2a62-463e-b4a3-668c2ecee318 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1818.014362] env[63379]: INFO nova.compute.manager [None req-586ea9fa-2a62-463e-b4a3-668c2ecee318 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Took 0.60 seconds to destroy the instance on the hypervisor. [ 1818.014599] env[63379]: DEBUG oslo.service.loopingcall [None req-586ea9fa-2a62-463e-b4a3-668c2ecee318 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1818.014788] env[63379]: DEBUG nova.compute.manager [-] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1818.014879] env[63379]: DEBUG nova.network.neutron [-] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1818.048693] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 758ade2c-7f75-4907-95d5-681d5792ae31] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1818.157488] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1818.191029] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68740d48-8e7b-40fe-a024-f5dff2b90b2c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.211298] env[63379]: DEBUG nova.network.neutron [req-836fa813-0781-40e3-ac1a-8f87a060e1df req-5a411d37-826f-4e10-9ca5-53411ce942af service nova] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Updated VIF entry in instance network info cache for port f7fd4937-49e3-4d89-8fed-cc6c052fc1c0. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1818.211843] env[63379]: DEBUG nova.network.neutron [req-836fa813-0781-40e3-ac1a-8f87a060e1df req-5a411d37-826f-4e10-9ca5-53411ce942af service nova] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Updating instance_info_cache with network_info: [{"id": "f7fd4937-49e3-4d89-8fed-cc6c052fc1c0", "address": "fa:16:3e:7f:3c:a0", "network": {"id": "867cf8d8-4bba-4306-ad6d-632c9dc6863d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-777715300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a3363a90de2d4d5988ddd03974c10d0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "459b8c74-0aa6-42b6-996a-42b1c5d7e5c6", "external-id": "nsx-vlan-transportzone-467", "segmentation_id": 467, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7fd4937-49", "ovs_interfaceid": "f7fd4937-49e3-4d89-8fed-cc6c052fc1c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1818.213931] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c9d7a95-a46d-4e7f-a049-958aed64c341 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.232071] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Updating instance '9faef8ba-2263-4af8-ba5b-13a17b4275b6' progress to 67 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1818.313545] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780128, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.484253] env[63379]: INFO nova.compute.manager [-] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Took 1.25 seconds to deallocate network for instance. 
[ 1818.514302] env[63379]: DEBUG nova.network.neutron [-] [instance: f99bad46-931d-497a-8586-b140309b0b45] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1818.520197] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c13d892f-536c-4640-8359-22b896361587 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.531255] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3243f7b-3eed-45cb-b268-1494a41b4edc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.560954] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: aedff32b-b0c2-4a93-a2c6-349d26839cc4] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1818.563391] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da790275-25c5-4498-8443-bd41e8a5e2ab {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.571377] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75cc85f0-8f43-4960-bb7d-8bd8079d42ec {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.584278] env[63379]: DEBUG nova.compute.provider_tree [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1818.717472] env[63379]: DEBUG oslo_concurrency.lockutils [req-836fa813-0781-40e3-ac1a-8f87a060e1df req-5a411d37-826f-4e10-9ca5-53411ce942af service nova] Releasing lock "refresh_cache-2be6bdea-416e-4912-8930-3c4e4f194f99" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1818.771338] env[63379]: DEBUG nova.network.neutron [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Port 414f9be4-c922-4750-817c-32bc1d4ac6c4 binding to destination host cpu-1 is already ACTIVE {{(pid=63379) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1818.811845] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780128, 'name': CreateVM_Task, 'duration_secs': 0.737914} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1818.812011] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1818.812697] env[63379]: DEBUG oslo_concurrency.lockutils [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1818.812887] env[63379]: DEBUG oslo_concurrency.lockutils [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1818.813225] env[63379]: DEBUG oslo_concurrency.lockutils [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1818.813475] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aed38465-e0db-4a88-a46c-2035820f1c1c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.818352] env[63379]: DEBUG oslo_vmware.api [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1818.818352] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b17d9c-e552-2a74-9bb6-a2d20f02fd5a" [ 1818.818352] env[63379]: _type = "Task" [ 1818.818352] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1818.826221] env[63379]: DEBUG oslo_vmware.api [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b17d9c-e552-2a74-9bb6-a2d20f02fd5a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.855492] env[63379]: DEBUG nova.network.neutron [-] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1818.991146] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8136c620-cbc0-43ec-bc67-341a4664a0d5 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1819.017393] env[63379]: INFO nova.compute.manager [-] [instance: f99bad46-931d-497a-8586-b140309b0b45] Took 1.32 seconds to deallocate network for instance. [ 1819.066557] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: aa44a4ff-14e5-42d2-a082-06fe0ae9646c] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1819.087867] env[63379]: DEBUG nova.scheduler.client.report [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1819.331449] env[63379]: DEBUG oslo_vmware.api [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b17d9c-e552-2a74-9bb6-a2d20f02fd5a, 'name': SearchDatastore_Task, 'duration_secs': 0.009649} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1819.331773] env[63379]: DEBUG oslo_concurrency.lockutils [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1819.332080] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1819.332343] env[63379]: DEBUG oslo_concurrency.lockutils [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1819.332526] env[63379]: DEBUG oslo_concurrency.lockutils [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1819.332674] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1819.332931] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-27c3cb61-f4ad-4c1a-8eda-200ad02fee83 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.341290] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1819.341469] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1819.342177] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-531dcc3f-a71e-4e87-986f-24ec6663a513 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.351579] env[63379]: DEBUG oslo_vmware.api [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1819.351579] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e0762e-dd5f-b61a-a995-1cdd13e2eb7f" [ 1819.351579] env[63379]: _type = "Task" [ 1819.351579] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1819.353158] env[63379]: DEBUG nova.compute.manager [req-56c6b473-d2de-49ba-9617-eeb9c823daf3 req-bab42fac-9d45-4f50-8ecc-87cd67793549 service nova] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Received event network-vif-deleted-8e6b3d77-6a88-493c-9ef0-bae55a6dbbc3 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1819.353365] env[63379]: DEBUG nova.compute.manager [req-56c6b473-d2de-49ba-9617-eeb9c823daf3 req-bab42fac-9d45-4f50-8ecc-87cd67793549 service nova] [instance: f99bad46-931d-497a-8586-b140309b0b45] Received event network-vif-deleted-93473bbe-e8ba-4cf7-b6ad-8880f0752f3f {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1819.353544] env[63379]: DEBUG nova.compute.manager [req-56c6b473-d2de-49ba-9617-eeb9c823daf3 req-bab42fac-9d45-4f50-8ecc-87cd67793549 service nova] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Received event network-vif-deleted-141d6d25-1ead-4801-ad93-5aaf2b65562c {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1819.357552] env[63379]: INFO nova.compute.manager [-] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Took 1.34 seconds to deallocate network for instance. [ 1819.362854] env[63379]: DEBUG oslo_vmware.api [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e0762e-dd5f-b61a-a995-1cdd13e2eb7f, 'name': SearchDatastore_Task, 'duration_secs': 0.008582} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1819.366133] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-397bd964-ba03-4674-8059-b450c9527908 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.371138] env[63379]: DEBUG oslo_vmware.api [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1819.371138] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52dcfd84-4225-22b9-a883-082acc160d0c" [ 1819.371138] env[63379]: _type = "Task" [ 1819.371138] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1819.378308] env[63379]: DEBUG oslo_vmware.api [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52dcfd84-4225-22b9-a883-082acc160d0c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.524200] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4e91aa5d-a0bf-44c6-b45d-a7e8951abf92 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1819.569968] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: a6a578ba-f8d0-459a-b0b7-5e40cc67e5a6] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1819.593034] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.302s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1819.593549] env[63379]: DEBUG nova.compute.manager [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1819.596221] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 1.439s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1819.596409] env[63379]: DEBUG nova.objects.instance [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63379) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1819.793094] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "9faef8ba-2263-4af8-ba5b-13a17b4275b6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1819.793349] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "9faef8ba-2263-4af8-ba5b-13a17b4275b6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1819.793532] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "9faef8ba-2263-4af8-ba5b-13a17b4275b6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1819.867813] env[63379]: DEBUG oslo_concurrency.lockutils [None req-586ea9fa-2a62-463e-b4a3-668c2ecee318 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1819.882106] env[63379]: DEBUG oslo_vmware.api [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52dcfd84-4225-22b9-a883-082acc160d0c, 'name': SearchDatastore_Task, 'duration_secs': 0.008573} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1819.882365] env[63379]: DEBUG oslo_concurrency.lockutils [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1819.882620] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 2be6bdea-416e-4912-8930-3c4e4f194f99/2be6bdea-416e-4912-8930-3c4e4f194f99.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1819.882869] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3f74a168-66c1-4101-82a8-a1255ae6a852 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.889826] env[63379]: DEBUG oslo_vmware.api [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1819.889826] env[63379]: value = "task-1780129" [ 1819.889826] env[63379]: _type = "Task" [ 1819.889826] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1819.897568] env[63379]: DEBUG oslo_vmware.api [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780129, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.072914] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: ee36cc5f-61a1-4e4f-9cae-670f5868d90c] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1820.105786] env[63379]: DEBUG nova.compute.utils [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1820.108300] env[63379]: DEBUG nova.compute.manager [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1820.108512] env[63379]: DEBUG nova.network.neutron [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1820.170603] env[63379]: DEBUG nova.policy [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4daab3ae5955497a9d25b4ef59118d0e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ba1a1cf17f9941b299a6102689835f88', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1820.402206] env[63379]: DEBUG oslo_vmware.api [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780129, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.492805] env[63379]: DEBUG nova.network.neutron [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Successfully created port: e11104ca-6957-4cad-9666-a5c91da87b62 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1820.576565] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: a78feafb-00bc-44c4-acd3-a36fb8a81767] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1820.609210] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f8fb2df3-6b1c-4b65-8e15-fba4c20f809e tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1820.610649] env[63379]: DEBUG nova.compute.manager [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Start building block device mappings for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1820.613354] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8136c620-cbc0-43ec-bc67-341a4664a0d5 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.622s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1820.613571] env[63379]: DEBUG nova.objects.instance [None req-8136c620-cbc0-43ec-bc67-341a4664a0d5 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Lazy-loading 'resources' on Instance uuid 1d2de9da-9dfe-42d2-b206-bb5139b1970b {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1820.827901] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "refresh_cache-9faef8ba-2263-4af8-ba5b-13a17b4275b6" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1820.828182] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquired lock "refresh_cache-9faef8ba-2263-4af8-ba5b-13a17b4275b6" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1820.828388] env[63379]: DEBUG nova.network.neutron [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1820.902132] env[63379]: DEBUG oslo_vmware.api [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780129, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.646033} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1820.902550] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 2be6bdea-416e-4912-8930-3c4e4f194f99/2be6bdea-416e-4912-8930-3c4e4f194f99.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1820.902893] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1820.903261] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2effd49c-67e0-4a2c-9881-7391e00123cd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.910053] env[63379]: DEBUG oslo_vmware.api [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1820.910053] env[63379]: value = "task-1780130" [ 1820.910053] env[63379]: _type = "Task" [ 1820.910053] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1820.917053] env[63379]: DEBUG oslo_vmware.api [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780130, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.079736] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 07cc8cd7-8368-41dd-ae13-01c8275cac9e] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1821.287045] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af9a5c85-25b4-464d-ba2d-003434967705 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.294616] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19bb4046-88b6-42f7-83bb-ba7610dbfc7a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.324955] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d244bc4a-e413-4b4f-9e66-4466d5b55b61 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.334410] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-669510e8-0646-48c2-85bf-d5a86dedd8f8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.350219] env[63379]: DEBUG nova.compute.provider_tree [None req-8136c620-cbc0-43ec-bc67-341a4664a0d5 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1821.423421] env[63379]: DEBUG oslo_vmware.api [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780130, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067685} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1821.424200] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1821.425345] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c514f54-0786-4a15-88c3-2af7cb47033e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.453825] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] 2be6bdea-416e-4912-8930-3c4e4f194f99/2be6bdea-416e-4912-8930-3c4e4f194f99.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1821.454127] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d3e13c0f-8ceb-4777-83e2-8499aca09c48 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.475217] env[63379]: DEBUG oslo_vmware.api [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1821.475217] env[63379]: value = "task-1780131" [ 1821.475217] env[63379]: _type = "Task" [ 1821.475217] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1821.483233] env[63379]: DEBUG oslo_vmware.api [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780131, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.562762] env[63379]: DEBUG nova.network.neutron [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Updating instance_info_cache with network_info: [{"id": "414f9be4-c922-4750-817c-32bc1d4ac6c4", "address": "fa:16:3e:a5:a9:b3", "network": {"id": "a2c9b802-041e-4679-bfb1-118fd9cd10f3", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-986609966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f28f4532d464e6eb90ab75799990c85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap414f9be4-c9", "ovs_interfaceid": "414f9be4-c922-4750-817c-32bc1d4ac6c4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1821.582769] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 04234ba7-24a3-48e5-9f62-6f4dddd0054a] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1821.623566] env[63379]: DEBUG nova.compute.manager [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1821.649956] env[63379]: DEBUG nova.virt.hardware [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1821.650228] env[63379]: DEBUG nova.virt.hardware [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1821.650392] env[63379]: DEBUG nova.virt.hardware [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1821.650578] env[63379]: DEBUG nova.virt.hardware [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1821.650724] env[63379]: DEBUG nova.virt.hardware [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1821.650870] env[63379]: DEBUG nova.virt.hardware [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1821.651087] env[63379]: DEBUG nova.virt.hardware [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1821.651251] env[63379]: DEBUG nova.virt.hardware [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1821.651418] env[63379]: DEBUG 
nova.virt.hardware [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1821.651582] env[63379]: DEBUG nova.virt.hardware [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1821.651754] env[63379]: DEBUG nova.virt.hardware [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1821.652619] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60d48abd-3c1e-4e65-bfae-b880c84255ef {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.660594] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4afa0d1f-fd52-4710-a894-c1dc9b8bbb6a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.852848] env[63379]: DEBUG nova.scheduler.client.report [None req-8136c620-cbc0-43ec-bc67-341a4664a0d5 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1821.888596] env[63379]: DEBUG nova.compute.manager [req-a1b35f76-39ef-43c8-aabe-190a65949b0f req-f95e0703-b53d-43a5-880c-23373391fec8 service nova] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Received event network-vif-plugged-e11104ca-6957-4cad-9666-a5c91da87b62 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1821.888841] env[63379]: DEBUG oslo_concurrency.lockutils [req-a1b35f76-39ef-43c8-aabe-190a65949b0f req-f95e0703-b53d-43a5-880c-23373391fec8 service nova] Acquiring lock "815d0af5-e9a8-4475-9414-42715ea32d6a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1821.889084] env[63379]: DEBUG oslo_concurrency.lockutils [req-a1b35f76-39ef-43c8-aabe-190a65949b0f req-f95e0703-b53d-43a5-880c-23373391fec8 service nova] Lock "815d0af5-e9a8-4475-9414-42715ea32d6a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1821.889295] env[63379]: DEBUG oslo_concurrency.lockutils 
[req-a1b35f76-39ef-43c8-aabe-190a65949b0f req-f95e0703-b53d-43a5-880c-23373391fec8 service nova] Lock "815d0af5-e9a8-4475-9414-42715ea32d6a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1821.889507] env[63379]: DEBUG nova.compute.manager [req-a1b35f76-39ef-43c8-aabe-190a65949b0f req-f95e0703-b53d-43a5-880c-23373391fec8 service nova] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] No waiting events found dispatching network-vif-plugged-e11104ca-6957-4cad-9666-a5c91da87b62 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1821.889616] env[63379]: WARNING nova.compute.manager [req-a1b35f76-39ef-43c8-aabe-190a65949b0f req-f95e0703-b53d-43a5-880c-23373391fec8 service nova] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Received unexpected event network-vif-plugged-e11104ca-6957-4cad-9666-a5c91da87b62 for instance with vm_state building and task_state spawning. [ 1821.985726] env[63379]: DEBUG oslo_vmware.api [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780131, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.065650] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Releasing lock "refresh_cache-9faef8ba-2263-4af8-ba5b-13a17b4275b6" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1822.085621] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 915aec20-5765-4aad-8b0f-f2d71b7d6428] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1822.180039] env[63379]: DEBUG nova.network.neutron [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Successfully updated port: e11104ca-6957-4cad-9666-a5c91da87b62 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1822.357197] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8136c620-cbc0-43ec-bc67-341a4664a0d5 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.744s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1822.359676] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4e91aa5d-a0bf-44c6-b45d-a7e8951abf92 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.836s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1822.359914] env[63379]: DEBUG nova.objects.instance [None req-4e91aa5d-a0bf-44c6-b45d-a7e8951abf92 tempest-DeleteServersAdminTestJSON-408815497 
tempest-DeleteServersAdminTestJSON-408815497-project-member] Lazy-loading 'resources' on Instance uuid f99bad46-931d-497a-8586-b140309b0b45 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1822.380040] env[63379]: INFO nova.scheduler.client.report [None req-8136c620-cbc0-43ec-bc67-341a4664a0d5 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Deleted allocations for instance 1d2de9da-9dfe-42d2-b206-bb5139b1970b [ 1822.486849] env[63379]: DEBUG oslo_vmware.api [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780131, 'name': ReconfigVM_Task, 'duration_secs': 0.515657} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1822.488074] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Reconfigured VM instance instance-00000061 to attach disk [datastore1] 2be6bdea-416e-4912-8930-3c4e4f194f99/2be6bdea-416e-4912-8930-3c4e4f194f99.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1822.488294] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2033b739-aaa2-498f-a8a8-10b5872f45e9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.495593] env[63379]: DEBUG oslo_vmware.api [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1822.495593] env[63379]: value = "task-1780132" [ 1822.495593] env[63379]: _type = "Task" [ 1822.495593] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.503303] env[63379]: DEBUG oslo_vmware.api [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780132, 'name': Rename_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.586721] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b627a198-d8cd-40fe-8238-4b1e0b382a40 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.589682] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 08465a2c-1ab6-4c53-9b12-3cd51c717b03] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1822.607524] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72d5ae41-2f7d-4e86-b7f2-bce1b179317e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.614700] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Updating instance '9faef8ba-2263-4af8-ba5b-13a17b4275b6' progress to 83 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1822.682327] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquiring lock "refresh_cache-815d0af5-e9a8-4475-9414-42715ea32d6a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1822.682489] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquired lock "refresh_cache-815d0af5-e9a8-4475-9414-42715ea32d6a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1822.682610] env[63379]: DEBUG nova.network.neutron [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1822.888518] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8136c620-cbc0-43ec-bc67-341a4664a0d5 tempest-ServerRescueNegativeTestJSON-240449381 tempest-ServerRescueNegativeTestJSON-240449381-project-member] Lock "1d2de9da-9dfe-42d2-b206-bb5139b1970b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.863s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1823.006072] env[63379]: DEBUG oslo_vmware.api [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780132, 'name': Rename_Task, 'duration_secs': 0.143957} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1823.008339] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1823.009295] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-aa1b572d-165c-463d-aa1c-6f9f5630196d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.015562] env[63379]: DEBUG oslo_vmware.api [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1823.015562] env[63379]: value = "task-1780133" [ 1823.015562] env[63379]: _type = "Task" [ 1823.015562] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1823.019996] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7089dfc-7635-4e27-bced-bc6cbd129119 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.027440] env[63379]: DEBUG oslo_vmware.api [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780133, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.030092] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84ccaed5-90b5-4815-b120-89c3e40e43f5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.062579] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f92d005b-fa50-4353-a832-3f2bc96e2ac1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.070922] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4869c76c-bf28-4719-9d48-affc79396d30 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.084927] env[63379]: DEBUG nova.compute.provider_tree [None req-4e91aa5d-a0bf-44c6-b45d-a7e8951abf92 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1823.092224] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: d2f5b406-3d0e-4150-aeaf-7cdacbc12c06] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1823.120562] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 
9faef8ba-2263-4af8-ba5b-13a17b4275b6] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1823.121113] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-af4248f8-256f-4aed-b5b9-183525ff8169 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.128590] env[63379]: DEBUG oslo_vmware.api [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1823.128590] env[63379]: value = "task-1780134" [ 1823.128590] env[63379]: _type = "Task" [ 1823.128590] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1823.136196] env[63379]: DEBUG oslo_vmware.api [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780134, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.226371] env[63379]: DEBUG nova.network.neutron [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1823.460085] env[63379]: DEBUG nova.network.neutron [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Updating instance_info_cache with network_info: [{"id": "e11104ca-6957-4cad-9666-a5c91da87b62", "address": "fa:16:3e:dc:48:9c", "network": {"id": "2c6cbb4b-63db-4c84-91d3-63d6f68cfb71", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-740697972-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba1a1cf17f9941b299a6102689835f88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1195acd-707f-4bac-a99d-14db17a63802", "external-id": "nsx-vlan-transportzone-322", "segmentation_id": 322, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape11104ca-69", "ovs_interfaceid": "e11104ca-6957-4cad-9666-a5c91da87b62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1823.526144] env[63379]: DEBUG oslo_vmware.api [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780133, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.587190] env[63379]: DEBUG nova.scheduler.client.report [None req-4e91aa5d-a0bf-44c6-b45d-a7e8951abf92 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1823.595404] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 941ac23c-6aa9-4ed1-840a-326423b7cbc0] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1823.638066] env[63379]: DEBUG oslo_vmware.api [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780134, 'name': PowerOnVM_Task, 'duration_secs': 0.393682} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1823.638374] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1823.638569] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-76afadbe-2914-4b5d-865f-9a5e6867176d tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Updating instance '9faef8ba-2263-4af8-ba5b-13a17b4275b6' progress to 100 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1823.913799] env[63379]: DEBUG nova.compute.manager [req-e5f69541-87c3-44f0-9ac4-dac39a07e346 req-87b5f253-9b94-4b46-a4c0-7149d4019d63 service nova] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Received event network-changed-e11104ca-6957-4cad-9666-a5c91da87b62 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1823.913799] env[63379]: DEBUG nova.compute.manager [req-e5f69541-87c3-44f0-9ac4-dac39a07e346 req-87b5f253-9b94-4b46-a4c0-7149d4019d63 service nova] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Refreshing instance network info cache due to event network-changed-e11104ca-6957-4cad-9666-a5c91da87b62. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1823.913976] env[63379]: DEBUG oslo_concurrency.lockutils [req-e5f69541-87c3-44f0-9ac4-dac39a07e346 req-87b5f253-9b94-4b46-a4c0-7149d4019d63 service nova] Acquiring lock "refresh_cache-815d0af5-e9a8-4475-9414-42715ea32d6a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1823.963723] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Releasing lock "refresh_cache-815d0af5-e9a8-4475-9414-42715ea32d6a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1823.964072] env[63379]: DEBUG nova.compute.manager [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Instance network_info: |[{"id": "e11104ca-6957-4cad-9666-a5c91da87b62", "address": "fa:16:3e:dc:48:9c", "network": {"id": "2c6cbb4b-63db-4c84-91d3-63d6f68cfb71", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-740697972-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba1a1cf17f9941b299a6102689835f88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1195acd-707f-4bac-a99d-14db17a63802", "external-id": "nsx-vlan-transportzone-322", "segmentation_id": 322, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape11104ca-69", "ovs_interfaceid": "e11104ca-6957-4cad-9666-a5c91da87b62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1823.964457] env[63379]: DEBUG oslo_concurrency.lockutils [req-e5f69541-87c3-44f0-9ac4-dac39a07e346 req-87b5f253-9b94-4b46-a4c0-7149d4019d63 service nova] Acquired lock "refresh_cache-815d0af5-e9a8-4475-9414-42715ea32d6a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1823.964651] env[63379]: DEBUG nova.network.neutron [req-e5f69541-87c3-44f0-9ac4-dac39a07e346 req-87b5f253-9b94-4b46-a4c0-7149d4019d63 service nova] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Refreshing network info cache for port e11104ca-6957-4cad-9666-a5c91da87b62 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1823.965986] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dc:48:9c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c1195acd-707f-4bac-a99d-14db17a63802', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'e11104ca-6957-4cad-9666-a5c91da87b62', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1823.974448] env[63379]: DEBUG oslo.service.loopingcall [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1823.975469] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1823.975719] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fb98adac-75d1-45f4-825e-dfd4ab89cb56 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.999176] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1823.999176] env[63379]: value = "task-1780135" [ 1823.999176] env[63379]: _type = "Task" [ 1823.999176] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.008980] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780135, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.024556] env[63379]: DEBUG oslo_vmware.api [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780133, 'name': PowerOnVM_Task, 'duration_secs': 0.663427} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1824.024827] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1824.025084] env[63379]: INFO nova.compute.manager [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Took 9.84 seconds to spawn the instance on the hypervisor. 
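Note on the task entries above: the CreateVM_Task and PowerOnVM_Task lines all follow the same oslo.vmware pattern that recurs throughout this log — invoke an asynchronous vSphere task through the API session, then poll it with wait_for_task until it completes (the "Waiting for the task" and "progress is N%" lines are that polling). A minimal sketch of that pattern, assuming a reachable vCenter; the host, credentials, and VM managed-object reference below are placeholders and do not come from this log:

    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Placeholder connection details -- in Nova the real values come from
    # nova.conf ([vmware] host_ip, host_username, host_password).
    session = vmware_api.VMwareAPISession(
        'vc.example.test',               # vCenter host (placeholder)
        'administrator@vsphere.local',   # username (placeholder)
        'secret',                        # password (placeholder)
        api_retry_count=10,
        task_poll_interval=0.5)

    # Managed-object reference for an existing VM (placeholder moref value).
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # Start the asynchronous vSphere task, then block until it finishes;
    # oslo.vmware polls the task state and emits progress log lines like
    # the "PowerOnVM_Task progress is N%" entries seen above.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    task_info = session.wait_for_task(task)   # raises if the task fails
    # task_info.state is 'success' once the VM is powered on.

    session.logout()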
[ 1824.025284] env[63379]: DEBUG nova.compute.manager [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1824.026362] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09e05fd8-9133-4600-870f-c23b6b60040e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.091882] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4e91aa5d-a0bf-44c6-b45d-a7e8951abf92 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.732s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1824.094295] env[63379]: DEBUG oslo_concurrency.lockutils [None req-586ea9fa-2a62-463e-b4a3-668c2ecee318 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.227s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1824.094562] env[63379]: DEBUG nova.objects.instance [None req-586ea9fa-2a62-463e-b4a3-668c2ecee318 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lazy-loading 'resources' on Instance uuid 62494fa1-5990-490d-92ae-00607d7ebba1 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1824.098702] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 41952d7b-ce23-4e9b-8843-bbac1d3099c1] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1824.116068] env[63379]: INFO nova.scheduler.client.report [None req-4e91aa5d-a0bf-44c6-b45d-a7e8951abf92 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Deleted allocations for instance f99bad46-931d-497a-8586-b140309b0b45 [ 1824.515145] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780135, 'name': CreateVM_Task, 'duration_secs': 0.363385} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1824.518960] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1824.519893] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1824.520224] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1824.520729] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1824.521837] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e380866-e140-429d-b400-a0991c58bc24 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.528883] env[63379]: DEBUG oslo_vmware.api [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 1824.528883] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52781126-8d68-0ae3-017e-cceb2ca0d146" [ 1824.528883] env[63379]: _type = "Task" [ 1824.528883] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.545169] env[63379]: DEBUG oslo_vmware.api [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52781126-8d68-0ae3-017e-cceb2ca0d146, 'name': SearchDatastore_Task, 'duration_secs': 0.010046} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1824.547942] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1824.548125] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1824.548460] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1824.548674] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1824.549026] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1824.549692] env[63379]: INFO nova.compute.manager [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Took 14.89 seconds to build instance. [ 1824.551282] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-03c40bed-354b-481d-875e-afa15009510c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.563108] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1824.563108] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1824.565912] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6694c810-0826-48d6-94aa-3360b8046c0f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.574818] env[63379]: DEBUG oslo_vmware.api [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 1824.574818] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]525edea4-7eeb-6fe4-14f8-d05fcf88b348" [ 1824.574818] env[63379]: _type = "Task" [ 1824.574818] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.586151] env[63379]: DEBUG oslo_vmware.api [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]525edea4-7eeb-6fe4-14f8-d05fcf88b348, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.602234] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 15d19ce3-ea71-47ff-a738-9ba00b8dfcf1] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1824.624417] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4e91aa5d-a0bf-44c6-b45d-a7e8951abf92 tempest-DeleteServersAdminTestJSON-408815497 tempest-DeleteServersAdminTestJSON-408815497-project-member] Lock "f99bad46-931d-497a-8586-b140309b0b45" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.089s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1824.788913] env[63379]: DEBUG nova.network.neutron [req-e5f69541-87c3-44f0-9ac4-dac39a07e346 req-87b5f253-9b94-4b46-a4c0-7149d4019d63 service nova] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Updated VIF entry in instance network info cache for port e11104ca-6957-4cad-9666-a5c91da87b62. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1824.789902] env[63379]: DEBUG nova.network.neutron [req-e5f69541-87c3-44f0-9ac4-dac39a07e346 req-87b5f253-9b94-4b46-a4c0-7149d4019d63 service nova] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Updating instance_info_cache with network_info: [{"id": "e11104ca-6957-4cad-9666-a5c91da87b62", "address": "fa:16:3e:dc:48:9c", "network": {"id": "2c6cbb4b-63db-4c84-91d3-63d6f68cfb71", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-740697972-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba1a1cf17f9941b299a6102689835f88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1195acd-707f-4bac-a99d-14db17a63802", "external-id": "nsx-vlan-transportzone-322", "segmentation_id": 322, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape11104ca-69", "ovs_interfaceid": "e11104ca-6957-4cad-9666-a5c91da87b62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1824.807740] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-828e4933-6ead-4ec0-b652-9c881fade82b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.816091] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cabfdd3-a087-49f0-b6f3-81c979878432 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.848741] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e543c17e-b0e3-4df3-a0e1-b6d4849bf68b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.856397] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feff0209-15bd-4fab-941f-4e1dbe1d7c9b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.869745] env[63379]: DEBUG nova.compute.provider_tree [None req-586ea9fa-2a62-463e-b4a3-668c2ecee318 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1825.054961] env[63379]: DEBUG oslo_concurrency.lockutils [None req-aa9cdb47-63c5-40a7-8091-248c938bbeb6 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "2be6bdea-416e-4912-8930-3c4e4f194f99" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.404s {{(pid=63379) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1825.088095] env[63379]: DEBUG oslo_vmware.api [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]525edea4-7eeb-6fe4-14f8-d05fcf88b348, 'name': SearchDatastore_Task, 'duration_secs': 0.010559} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.089060] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f496a55d-823d-4c06-a037-3f0f0c6c030b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.095632] env[63379]: DEBUG oslo_vmware.api [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 1825.095632] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52643805-16d9-d5a9-3938-3b11a149bb90" [ 1825.095632] env[63379]: _type = "Task" [ 1825.095632] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.106598] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: bf0dd3cf-684c-4378-a89c-5b9f16df062d] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1825.108176] env[63379]: DEBUG oslo_vmware.api [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52643805-16d9-d5a9-3938-3b11a149bb90, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.292372] env[63379]: DEBUG oslo_concurrency.lockutils [req-e5f69541-87c3-44f0-9ac4-dac39a07e346 req-87b5f253-9b94-4b46-a4c0-7149d4019d63 service nova] Releasing lock "refresh_cache-815d0af5-e9a8-4475-9414-42715ea32d6a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1825.373395] env[63379]: DEBUG nova.scheduler.client.report [None req-586ea9fa-2a62-463e-b4a3-668c2ecee318 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1825.435794] env[63379]: DEBUG oslo_concurrency.lockutils [None req-65251b24-3584-46e0-93fb-8074285413b6 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "9faef8ba-2263-4af8-ba5b-13a17b4275b6" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1825.436104] env[63379]: DEBUG oslo_concurrency.lockutils [None req-65251b24-3584-46e0-93fb-8074285413b6 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "9faef8ba-2263-4af8-ba5b-13a17b4275b6" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1825.436377] env[63379]: DEBUG nova.compute.manager [None req-65251b24-3584-46e0-93fb-8074285413b6 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Going to confirm migration 4 {{(pid=63379) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 1825.608321] env[63379]: DEBUG oslo_vmware.api [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52643805-16d9-d5a9-3938-3b11a149bb90, 'name': SearchDatastore_Task, 'duration_secs': 0.013291} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.608321] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1825.608321] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 815d0af5-e9a8-4475-9414-42715ea32d6a/815d0af5-e9a8-4475-9414-42715ea32d6a.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1825.608321] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6a306c20-0b5f-4fd7-9d3b-1c2a5d46071b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.610456] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: d47be684-6cd8-45c6-8c6a-9a6db0390f97] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1825.618450] env[63379]: DEBUG oslo_vmware.api [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 1825.618450] env[63379]: value = "task-1780136" [ 1825.618450] env[63379]: _type = "Task" [ 1825.618450] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.626705] env[63379]: DEBUG oslo_vmware.api [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780136, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.878454] env[63379]: DEBUG oslo_concurrency.lockutils [None req-586ea9fa-2a62-463e-b4a3-668c2ecee318 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.784s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1825.905619] env[63379]: INFO nova.scheduler.client.report [None req-586ea9fa-2a62-463e-b4a3-668c2ecee318 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Deleted allocations for instance 62494fa1-5990-490d-92ae-00607d7ebba1 [ 1825.946907] env[63379]: DEBUG nova.compute.manager [req-b9c1d3a7-8c54-416b-bfb2-5ed122fe7510 req-b66b4f4b-552d-4691-85d7-e41a84ac19eb service nova] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Received event network-changed-f7fd4937-49e3-4d89-8fed-cc6c052fc1c0 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1825.947115] env[63379]: DEBUG nova.compute.manager [req-b9c1d3a7-8c54-416b-bfb2-5ed122fe7510 req-b66b4f4b-552d-4691-85d7-e41a84ac19eb service nova] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Refreshing instance network info cache due to event network-changed-f7fd4937-49e3-4d89-8fed-cc6c052fc1c0. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1825.947268] env[63379]: DEBUG oslo_concurrency.lockutils [req-b9c1d3a7-8c54-416b-bfb2-5ed122fe7510 req-b66b4f4b-552d-4691-85d7-e41a84ac19eb service nova] Acquiring lock "refresh_cache-2be6bdea-416e-4912-8930-3c4e4f194f99" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1825.947439] env[63379]: DEBUG oslo_concurrency.lockutils [req-b9c1d3a7-8c54-416b-bfb2-5ed122fe7510 req-b66b4f4b-552d-4691-85d7-e41a84ac19eb service nova] Acquired lock "refresh_cache-2be6bdea-416e-4912-8930-3c4e4f194f99" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1825.947598] env[63379]: DEBUG nova.network.neutron [req-b9c1d3a7-8c54-416b-bfb2-5ed122fe7510 req-b66b4f4b-552d-4691-85d7-e41a84ac19eb service nova] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Refreshing network info cache for port f7fd4937-49e3-4d89-8fed-cc6c052fc1c0 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1825.986924] env[63379]: DEBUG oslo_concurrency.lockutils [None req-65251b24-3584-46e0-93fb-8074285413b6 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "refresh_cache-9faef8ba-2263-4af8-ba5b-13a17b4275b6" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1825.987637] env[63379]: DEBUG oslo_concurrency.lockutils [None req-65251b24-3584-46e0-93fb-8074285413b6 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquired lock "refresh_cache-9faef8ba-2263-4af8-ba5b-13a17b4275b6" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1825.987637] env[63379]: DEBUG nova.network.neutron [None req-65251b24-3584-46e0-93fb-8074285413b6 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 
9faef8ba-2263-4af8-ba5b-13a17b4275b6] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1825.987872] env[63379]: DEBUG nova.objects.instance [None req-65251b24-3584-46e0-93fb-8074285413b6 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lazy-loading 'info_cache' on Instance uuid 9faef8ba-2263-4af8-ba5b-13a17b4275b6 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1826.114271] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: aa0d7b97-c8b2-4b45-baf4-1d9e783f0fae] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 1826.131288] env[63379]: DEBUG oslo_vmware.api [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780136, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.148026] env[63379]: DEBUG nova.compute.manager [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1826.148026] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42288885-acd8-41fc-bdb8-d60c18176e66 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.415273] env[63379]: DEBUG oslo_concurrency.lockutils [None req-586ea9fa-2a62-463e-b4a3-668c2ecee318 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "62494fa1-5990-490d-92ae-00607d7ebba1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.010s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1826.617936] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1826.617936] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Cleaning up deleted instances with incomplete migration {{(pid=63379) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11312}} [ 1826.630211] env[63379]: DEBUG oslo_vmware.api [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780136, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.645259} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1826.630487] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 815d0af5-e9a8-4475-9414-42715ea32d6a/815d0af5-e9a8-4475-9414-42715ea32d6a.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1826.630700] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1826.630954] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-87eeb5c5-ddb8-45b7-b933-132b0b22ebd4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.639540] env[63379]: DEBUG oslo_vmware.api [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 1826.639540] env[63379]: value = "task-1780137" [ 1826.639540] env[63379]: _type = "Task" [ 1826.639540] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1826.649485] env[63379]: DEBUG oslo_vmware.api [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780137, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.660497] env[63379]: INFO nova.compute.manager [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] instance snapshotting [ 1826.661779] env[63379]: DEBUG nova.objects.instance [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lazy-loading 'flavor' on Instance uuid 4b419aa8-d4da-45fd-a6da-6f05ee851f2f {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1826.683671] env[63379]: DEBUG nova.network.neutron [req-b9c1d3a7-8c54-416b-bfb2-5ed122fe7510 req-b66b4f4b-552d-4691-85d7-e41a84ac19eb service nova] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Updated VIF entry in instance network info cache for port f7fd4937-49e3-4d89-8fed-cc6c052fc1c0. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1826.683671] env[63379]: DEBUG nova.network.neutron [req-b9c1d3a7-8c54-416b-bfb2-5ed122fe7510 req-b66b4f4b-552d-4691-85d7-e41a84ac19eb service nova] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Updating instance_info_cache with network_info: [{"id": "f7fd4937-49e3-4d89-8fed-cc6c052fc1c0", "address": "fa:16:3e:7f:3c:a0", "network": {"id": "867cf8d8-4bba-4306-ad6d-632c9dc6863d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-777715300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.133", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a3363a90de2d4d5988ddd03974c10d0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "459b8c74-0aa6-42b6-996a-42b1c5d7e5c6", "external-id": "nsx-vlan-transportzone-467", "segmentation_id": 467, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7fd4937-49", "ovs_interfaceid": "f7fd4937-49e3-4d89-8fed-cc6c052fc1c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1827.120689] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1827.151518] env[63379]: DEBUG oslo_vmware.api [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780137, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.210604} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1827.151872] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1827.152693] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1dd4291-7386-4482-850f-e68eef9f15b2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.180638] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] 815d0af5-e9a8-4475-9414-42715ea32d6a/815d0af5-e9a8-4475-9414-42715ea32d6a.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1827.183454] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dd6aa01-d8f7-4a58-acc3-2d395e8841f9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.186284] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ba81174a-6fb5-4de0-83e5-539fa003e8c5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.201806] env[63379]: DEBUG oslo_concurrency.lockutils [req-b9c1d3a7-8c54-416b-bfb2-5ed122fe7510 req-b66b4f4b-552d-4691-85d7-e41a84ac19eb service nova] Releasing lock "refresh_cache-2be6bdea-416e-4912-8930-3c4e4f194f99" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1827.221501] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0af49bbb-3792-4969-ab55-1c248615135b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.224597] env[63379]: DEBUG oslo_vmware.api [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 1827.224597] env[63379]: value = "task-1780138" [ 1827.224597] env[63379]: _type = "Task" [ 1827.224597] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1827.240942] env[63379]: DEBUG oslo_vmware.api [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780138, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.315528] env[63379]: DEBUG nova.network.neutron [None req-65251b24-3584-46e0-93fb-8074285413b6 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Updating instance_info_cache with network_info: [{"id": "414f9be4-c922-4750-817c-32bc1d4ac6c4", "address": "fa:16:3e:a5:a9:b3", "network": {"id": "a2c9b802-041e-4679-bfb1-118fd9cd10f3", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-986609966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f28f4532d464e6eb90ab75799990c85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap414f9be4-c9", "ovs_interfaceid": "414f9be4-c922-4750-817c-32bc1d4ac6c4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1827.655320] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1827.703728] env[63379]: DEBUG oslo_concurrency.lockutils [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "88dae632-b363-4187-9198-e4300783d420" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1827.704525] env[63379]: DEBUG oslo_concurrency.lockutils [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "88dae632-b363-4187-9198-e4300783d420" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1827.735389] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Creating Snapshot of the VM instance {{(pid=63379) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1827.735898] env[63379]: DEBUG oslo_vmware.api [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780138, 'name': 
ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.735898] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-e201a622-cce5-4b0d-9332-7509047e632b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.743469] env[63379]: DEBUG oslo_vmware.api [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 1827.743469] env[63379]: value = "task-1780139" [ 1827.743469] env[63379]: _type = "Task" [ 1827.743469] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1827.751214] env[63379]: DEBUG oslo_vmware.api [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780139, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.818730] env[63379]: DEBUG oslo_concurrency.lockutils [None req-65251b24-3584-46e0-93fb-8074285413b6 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Releasing lock "refresh_cache-9faef8ba-2263-4af8-ba5b-13a17b4275b6" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1827.818961] env[63379]: DEBUG nova.objects.instance [None req-65251b24-3584-46e0-93fb-8074285413b6 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lazy-loading 'migration_context' on Instance uuid 9faef8ba-2263-4af8-ba5b-13a17b4275b6 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1828.161234] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1828.161863] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._sync_power_states {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1828.207033] env[63379]: DEBUG nova.compute.manager [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1828.235776] env[63379]: DEBUG oslo_vmware.api [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780138, 'name': ReconfigVM_Task, 'duration_secs': 0.778217} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1828.236114] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Reconfigured VM instance instance-00000062 to attach disk [datastore1] 815d0af5-e9a8-4475-9414-42715ea32d6a/815d0af5-e9a8-4475-9414-42715ea32d6a.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1828.236758] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3a9ec09c-43c4-4ffe-882d-b0f28678edb4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.244078] env[63379]: DEBUG oslo_vmware.api [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 1828.244078] env[63379]: value = "task-1780140" [ 1828.244078] env[63379]: _type = "Task" [ 1828.244078] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1828.255282] env[63379]: DEBUG oslo_vmware.api [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780139, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.258432] env[63379]: DEBUG oslo_vmware.api [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780140, 'name': Rename_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.321811] env[63379]: DEBUG nova.objects.base [None req-65251b24-3584-46e0-93fb-8074285413b6 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Object Instance<9faef8ba-2263-4af8-ba5b-13a17b4275b6> lazy-loaded attributes: info_cache,migration_context {{(pid=63379) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1828.322764] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61ea6e65-4e17-47f9-b8ec-8f38bffaa17e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.342880] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-880748ae-843a-4949-a1ff-afcfb6483536 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.348547] env[63379]: DEBUG oslo_vmware.api [None req-65251b24-3584-46e0-93fb-8074285413b6 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1828.348547] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52883210-c6c1-4c18-89b6-e8ddca86a26f" [ 1828.348547] env[63379]: _type = "Task" [ 1828.348547] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1828.356240] env[63379]: DEBUG oslo_vmware.api [None req-65251b24-3584-46e0-93fb-8074285413b6 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52883210-c6c1-4c18-89b6-e8ddca86a26f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.666651] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-76609179-3ebc-4316-8203-21a64671102b None None] Getting list of instances from cluster (obj){ [ 1828.666651] env[63379]: value = "domain-c8" [ 1828.666651] env[63379]: _type = "ClusterComputeResource" [ 1828.666651] env[63379]: } {{(pid=63379) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1828.667799] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b85bebd-c4ec-4ff8-98c8-6bdeb76ae042 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.687361] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-76609179-3ebc-4316-8203-21a64671102b None None] Got total of 10 instances {{(pid=63379) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1828.687537] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Triggering sync for uuid 90f0c97d-695b-4975-8ab9-4e77a9175da1 {{(pid=63379) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10384}} [ 1828.687731] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Triggering sync for uuid 1d76a28f-822d-4b4f-be2f-2ad3371b3979 {{(pid=63379) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10384}} [ 1828.687889] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Triggering sync for uuid 510db409-0b4c-494a-8084-39ef3cd6c918 {{(pid=63379) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10384}} [ 1828.688070] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Triggering sync for uuid a7cce485-7476-4ea1-b127-68d879e164cd {{(pid=63379) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10384}} [ 1828.688236] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Triggering sync for uuid 19941838-d6b0-4fb8-9d06-f4a1b80ba428 {{(pid=63379) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10384}} [ 1828.688397] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Triggering sync for uuid 4b419aa8-d4da-45fd-a6da-6f05ee851f2f {{(pid=63379) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10384}} [ 1828.688544] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Triggering sync for uuid 8078bac6-146a-4e3a-a7a7-7093f617a330 {{(pid=63379) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10384}} [ 1828.689130] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Triggering sync for uuid 9faef8ba-2263-4af8-ba5b-13a17b4275b6 {{(pid=63379) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10384}} [ 1828.689130] env[63379]: DEBUG nova.compute.manager [None 
req-76609179-3ebc-4316-8203-21a64671102b None None] Triggering sync for uuid 2be6bdea-416e-4912-8930-3c4e4f194f99 {{(pid=63379) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10384}} [ 1828.689130] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Triggering sync for uuid 815d0af5-e9a8-4475-9414-42715ea32d6a {{(pid=63379) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10384}} [ 1828.689322] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "90f0c97d-695b-4975-8ab9-4e77a9175da1" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1828.689547] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "90f0c97d-695b-4975-8ab9-4e77a9175da1" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1828.689810] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "1d76a28f-822d-4b4f-be2f-2ad3371b3979" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1828.689999] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "1d76a28f-822d-4b4f-be2f-2ad3371b3979" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1828.690243] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "510db409-0b4c-494a-8084-39ef3cd6c918" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1828.690435] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "510db409-0b4c-494a-8084-39ef3cd6c918" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1828.690651] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "a7cce485-7476-4ea1-b127-68d879e164cd" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1828.690829] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "a7cce485-7476-4ea1-b127-68d879e164cd" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} 
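The instance_info_cache payloads recorded earlier in this trace (ports e11104ca-…, f7fd4937-… and 414f9be4-…) all share one VIF shape: a port id and MAC plus a nested network carrying subnets, fixed and floating IPs, MTU and binding details. A minimal sketch, assuming only that dict shape, of pulling the commonly needed fields out of one entry; summarize_vif is an illustrative helper, not a Nova API:

# Sketch only: walks a VIF entry shaped like the instance_info_cache
# payloads in this log; summarize_vif is a hypothetical helper.
def summarize_vif(vif):
    subnet = vif["network"]["subnets"][0]
    fixed = [ip["address"] for ip in subnet["ips"] if ip["type"] == "fixed"]
    floating = [
        fip["address"]
        for ip in subnet["ips"]
        for fip in ip.get("floating_ips", [])
    ]
    return {
        "port_id": vif["id"],
        "mac": vif["address"],
        "bridge": vif["network"]["bridge"],
        "cidr": subnet["cidr"],
        "fixed_ips": fixed,
        "floating_ips": floating,
        "mtu": vif["network"]["meta"]["mtu"],
        "segmentation_id": vif["details"]["segmentation_id"],
    }

# For the e11104ca-... entry logged above this yields fixed_ips
# ['192.168.128.4'], no floating IPs, mtu 8950 and segmentation_id 322.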
[ 1828.691065] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "19941838-d6b0-4fb8-9d06-f4a1b80ba428" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1828.691318] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "19941838-d6b0-4fb8-9d06-f4a1b80ba428" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1828.691476] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "4b419aa8-d4da-45fd-a6da-6f05ee851f2f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1828.691651] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "4b419aa8-d4da-45fd-a6da-6f05ee851f2f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1828.691814] env[63379]: INFO nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] During sync_power_state the instance has a pending task (image_pending_upload). Skip. 
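The _sync_power_states records above take a short-lived lock named after each instance UUID and skip any instance that still has a pending task (image_pending_upload here, spawning later in the trace). A minimal sketch of that pattern using oslo_concurrency.lockutils; the instance dict and query_driver_state callable are stand-ins, not Nova's objects:

# Sketch only: per-instance locking and the "pending task -> Skip" check
# reflected in the _sync_power_states records; not Nova's implementation.
from oslo_concurrency import lockutils


def sync_one_power_state(instance, query_driver_state):
    # Lock name mirrors the UUID-based lock names seen in the log.
    with lockutils.lock(instance["uuid"]):
        if instance.get("task_state"):
            # e.g. image_pending_upload or spawning -> logged as "Skip."
            return
        query_driver_state(instance)


def sync_power_states(instances, query_driver_state):
    for instance in instances:
        sync_one_power_state(instance, query_driver_state)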
[ 1828.691974] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "4b419aa8-d4da-45fd-a6da-6f05ee851f2f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1828.692177] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "8078bac6-146a-4e3a-a7a7-7093f617a330" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1828.692379] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "8078bac6-146a-4e3a-a7a7-7093f617a330" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1828.692606] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "9faef8ba-2263-4af8-ba5b-13a17b4275b6" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1828.692813] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "2be6bdea-416e-4912-8930-3c4e4f194f99" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1828.692986] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "2be6bdea-416e-4912-8930-3c4e4f194f99" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1828.693225] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "815d0af5-e9a8-4475-9414-42715ea32d6a" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1828.694041] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-517219c8-3945-42d6-a5ac-0e1001398a2b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.697147] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-640c7d35-0967-4074-a6e1-8947220c2e9f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.699846] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f3895f5-8dc7-45d6-8f0c-2df426a17ba1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.702533] env[63379]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbd3cc11-b692-4866-a8e9-1c5214a7ef01 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.705103] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1417e871-59ad-4c2f-99b1-26435acfb6de {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.707799] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a994c9b-1450-4c27-83b8-8f665fbdd229 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.711019] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9d3652b-1a9b-46a1-afbe-5a5c8068c43a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.742429] env[63379]: DEBUG oslo_concurrency.lockutils [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1828.743236] env[63379]: DEBUG oslo_concurrency.lockutils [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1828.744177] env[63379]: INFO nova.compute.claims [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1828.761526] env[63379]: DEBUG oslo_vmware.api [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780139, 'name': CreateSnapshot_Task, 'duration_secs': 0.552873} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1828.764206] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Created Snapshot of the VM instance {{(pid=63379) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1828.764497] env[63379]: DEBUG oslo_vmware.api [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780140, 'name': Rename_Task, 'duration_secs': 0.151778} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1828.765480] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-634439c5-1d1f-4e72-b1e2-f2893fffa89a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.770877] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1828.770877] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b93c9010-aa6f-45c4-971e-5f7d46089f40 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.780232] env[63379]: DEBUG oslo_vmware.api [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 1828.780232] env[63379]: value = "task-1780141" [ 1828.780232] env[63379]: _type = "Task" [ 1828.780232] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1828.790219] env[63379]: DEBUG oslo_vmware.api [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780141, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.858921] env[63379]: DEBUG oslo_vmware.api [None req-65251b24-3584-46e0-93fb-8074285413b6 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52883210-c6c1-4c18-89b6-e8ddca86a26f, 'name': SearchDatastore_Task, 'duration_secs': 0.006848} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1828.859259] env[63379]: DEBUG oslo_concurrency.lockutils [None req-65251b24-3584-46e0-93fb-8074285413b6 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1829.238263] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "1d76a28f-822d-4b4f-be2f-2ad3371b3979" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.548s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1829.239798] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "a7cce485-7476-4ea1-b127-68d879e164cd" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.549s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1829.240128] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "510db409-0b4c-494a-8084-39ef3cd6c918" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.550s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1829.240434] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "90f0c97d-695b-4975-8ab9-4e77a9175da1" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.551s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1829.247915] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "8078bac6-146a-4e3a-a7a7-7093f617a330" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.555s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1829.253700] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "2be6bdea-416e-4912-8930-3c4e4f194f99" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.561s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1829.271176] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "19941838-d6b0-4fb8-9d06-f4a1b80ba428" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.580s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1829.287657] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 
4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Creating linked-clone VM from snapshot {{(pid=63379) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1829.288060] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-0cfa53de-7743-4cb2-8163-e00a463feb1e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.302648] env[63379]: DEBUG oslo_vmware.api [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780141, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.303935] env[63379]: DEBUG oslo_vmware.api [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 1829.303935] env[63379]: value = "task-1780142" [ 1829.303935] env[63379]: _type = "Task" [ 1829.303935] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1829.311223] env[63379]: DEBUG oslo_vmware.api [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780142, 'name': CloneVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.801177] env[63379]: DEBUG oslo_vmware.api [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780141, 'name': PowerOnVM_Task, 'duration_secs': 0.95589} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1829.801465] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1829.801867] env[63379]: INFO nova.compute.manager [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Took 8.18 seconds to spawn the instance on the hypervisor. 
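Task-1780136 through task-1780141 above trace the VMware spawn path for instance 815d0af5-…: copy the cached image vmdk, extend the root disk, reconfigure the VM to attach it, rename, then power on, with each vCenter task polled until it completes. A minimal, self-contained sketch of that poll-and-chain pattern; run_vc_task and the vm/task helpers are hypothetical stand-ins, not oslo.vmware's or Nova's API:

# Sketch only: chain of vCenter tasks with polling, mirroring the
# CopyVirtualDisk -> ExtendVirtualDisk -> ReconfigVM -> Rename -> PowerOnVM
# sequence in the log. All helpers here are hypothetical stand-ins.
import time


def run_vc_task(start_task, poll_interval=0.5):
    """Start a task and poll it until it reports success or error."""
    task = start_task()
    while True:
        state = task.state()            # assumed accessor
        if state == "success":
            return task.result()        # assumed accessor
        if state == "error":
            raise RuntimeError(task.error())
        # The repeated "progress is N%" records come from polls like this.
        time.sleep(poll_interval)


def spawn_root_disk(vm, cached_vmdk, root_vmdk, new_size_kb):
    run_vc_task(lambda: vm.copy_virtual_disk(cached_vmdk, root_vmdk))
    run_vc_task(lambda: vm.extend_virtual_disk(root_vmdk, new_size_kb))
    run_vc_task(lambda: vm.reconfig_attach_disk(root_vmdk))
    run_vc_task(lambda: vm.rename())
    run_vc_task(lambda: vm.power_on())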
[ 1829.801867] env[63379]: DEBUG nova.compute.manager [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1829.802555] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf29a398-a578-474d-9b3e-d683db363aa9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.815682] env[63379]: DEBUG oslo_vmware.api [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780142, 'name': CloneVM_Task} progress is 94%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.923456] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6a4f37e-3b92-44e7-93d6-ff76eb82dcee {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.931830] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96b1f627-692f-4402-9e17-0410f8ed5848 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.961265] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-598383a7-c049-4f3e-add4-74c5745a0090 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.968786] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24b0a81c-45da-439e-a976-b51b4676822a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.981939] env[63379]: DEBUG nova.compute.provider_tree [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1830.318999] env[63379]: DEBUG oslo_vmware.api [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780142, 'name': CloneVM_Task} progress is 94%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.329894] env[63379]: INFO nova.compute.manager [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Took 13.06 seconds to build instance. 
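The inventory payloads reported for provider cf478c89-515f-4372-b90f-4868ab56e978, earlier in this trace and again just below, carry total, reserved and allocation_ratio per resource class. A small worked sketch, using only the dict shape shown in the log, of the usual effective-capacity calculation (total minus reserved, times allocation ratio); the helper name is illustrative, not Placement's API:

# Sketch only: derives schedulable capacity from an inventory record with
# the shape shown in the log (min_unit/max_unit/step_size omitted here).
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
}


def effective_capacity(inv):
    # capacity = (total - reserved) * allocation_ratio
    return {
        rc: (v["total"] - v["reserved"]) * v["allocation_ratio"]
        for rc, v in inv.items()
    }


print(effective_capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}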
[ 1830.484969] env[63379]: DEBUG nova.scheduler.client.report [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1830.820246] env[63379]: DEBUG oslo_vmware.api [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780142, 'name': CloneVM_Task} progress is 94%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.831877] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0ae9b36f-85aa-4da9-b9fc-8d7b65c351b4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "815d0af5-e9a8-4475-9414-42715ea32d6a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.573s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1830.832181] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "815d0af5-e9a8-4475-9414-42715ea32d6a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 2.139s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1830.832383] env[63379]: INFO nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] During sync_power_state the instance has a pending task (spawning). Skip. [ 1830.832561] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "815d0af5-e9a8-4475-9414-42715ea32d6a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1830.918066] env[63379]: DEBUG nova.compute.manager [req-02262820-7d2a-4ff2-9b18-27f9f37a6e69 req-935fc4f6-cb12-44f4-824b-4a20b20ffe54 service nova] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Received event network-changed-e11104ca-6957-4cad-9666-a5c91da87b62 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1830.918293] env[63379]: DEBUG nova.compute.manager [req-02262820-7d2a-4ff2-9b18-27f9f37a6e69 req-935fc4f6-cb12-44f4-824b-4a20b20ffe54 service nova] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Refreshing instance network info cache due to event network-changed-e11104ca-6957-4cad-9666-a5c91da87b62. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1830.918603] env[63379]: DEBUG oslo_concurrency.lockutils [req-02262820-7d2a-4ff2-9b18-27f9f37a6e69 req-935fc4f6-cb12-44f4-824b-4a20b20ffe54 service nova] Acquiring lock "refresh_cache-815d0af5-e9a8-4475-9414-42715ea32d6a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1830.918678] env[63379]: DEBUG oslo_concurrency.lockutils [req-02262820-7d2a-4ff2-9b18-27f9f37a6e69 req-935fc4f6-cb12-44f4-824b-4a20b20ffe54 service nova] Acquired lock "refresh_cache-815d0af5-e9a8-4475-9414-42715ea32d6a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1830.918819] env[63379]: DEBUG nova.network.neutron [req-02262820-7d2a-4ff2-9b18-27f9f37a6e69 req-935fc4f6-cb12-44f4-824b-4a20b20ffe54 service nova] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Refreshing network info cache for port e11104ca-6957-4cad-9666-a5c91da87b62 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1830.995027] env[63379]: DEBUG oslo_concurrency.lockutils [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.249s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1830.995027] env[63379]: DEBUG nova.compute.manager [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1830.995804] env[63379]: DEBUG oslo_concurrency.lockutils [None req-65251b24-3584-46e0-93fb-8074285413b6 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 2.137s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1831.318783] env[63379]: DEBUG oslo_vmware.api [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780142, 'name': CloneVM_Task} progress is 95%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.501082] env[63379]: DEBUG nova.compute.utils [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1831.502954] env[63379]: DEBUG nova.compute.manager [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1831.502954] env[63379]: DEBUG nova.network.neutron [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1831.559183] env[63379]: DEBUG nova.policy [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '991a93509b8943a693859488a56352b3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '928a9d102f0e45b897eae72fa566c0fe', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1831.650108] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d5ba419-1e76-4601-bcc1-5f42172a0839 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.658273] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6011921d-21cb-430b-bc14-71b265643586 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.691137] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aad5d12-2db7-4660-9498-aee5b7f58c7a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.699733] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06d4cac6-0e05-4082-a156-42645500083e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.713214] env[63379]: DEBUG nova.compute.provider_tree [None req-65251b24-3584-46e0-93fb-8074285413b6 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1831.751226] env[63379]: DEBUG nova.network.neutron [req-02262820-7d2a-4ff2-9b18-27f9f37a6e69 req-935fc4f6-cb12-44f4-824b-4a20b20ffe54 service nova] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Updated VIF entry in instance network info cache for port e11104ca-6957-4cad-9666-a5c91da87b62. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1831.751605] env[63379]: DEBUG nova.network.neutron [req-02262820-7d2a-4ff2-9b18-27f9f37a6e69 req-935fc4f6-cb12-44f4-824b-4a20b20ffe54 service nova] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Updating instance_info_cache with network_info: [{"id": "e11104ca-6957-4cad-9666-a5c91da87b62", "address": "fa:16:3e:dc:48:9c", "network": {"id": "2c6cbb4b-63db-4c84-91d3-63d6f68cfb71", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-740697972-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.182", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba1a1cf17f9941b299a6102689835f88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1195acd-707f-4bac-a99d-14db17a63802", "external-id": "nsx-vlan-transportzone-322", "segmentation_id": 322, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape11104ca-69", "ovs_interfaceid": "e11104ca-6957-4cad-9666-a5c91da87b62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1831.822800] env[63379]: DEBUG oslo_vmware.api [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780142, 'name': CloneVM_Task} progress is 95%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.877017] env[63379]: DEBUG nova.network.neutron [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Successfully created port: 083ff06b-fbdc-4b0f-9c47-6fce99aa11ac {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1832.006185] env[63379]: DEBUG nova.compute.manager [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Start building block device mappings for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1832.216920] env[63379]: DEBUG nova.scheduler.client.report [None req-65251b24-3584-46e0-93fb-8074285413b6 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1832.253956] env[63379]: DEBUG oslo_concurrency.lockutils [req-02262820-7d2a-4ff2-9b18-27f9f37a6e69 req-935fc4f6-cb12-44f4-824b-4a20b20ffe54 service nova] Releasing lock "refresh_cache-815d0af5-e9a8-4475-9414-42715ea32d6a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1832.320269] env[63379]: DEBUG oslo_vmware.api [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780142, 'name': CloneVM_Task} progress is 95%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.496437] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1832.820850] env[63379]: DEBUG oslo_vmware.api [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780142, 'name': CloneVM_Task} progress is 95%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.964458] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1832.964811] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63379) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10593}} [ 1833.015170] env[63379]: DEBUG nova.compute.manager [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1833.037497] env[63379]: DEBUG nova.virt.hardware [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1833.037753] env[63379]: DEBUG nova.virt.hardware [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1833.037916] env[63379]: DEBUG nova.virt.hardware [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1833.038123] env[63379]: DEBUG nova.virt.hardware [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1833.038322] env[63379]: DEBUG nova.virt.hardware [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1833.038491] env[63379]: DEBUG nova.virt.hardware [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1833.038708] env[63379]: DEBUG nova.virt.hardware [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1833.038873] env[63379]: DEBUG nova.virt.hardware [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1833.039062] env[63379]: DEBUG nova.virt.hardware [None 
req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1833.039237] env[63379]: DEBUG nova.virt.hardware [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1833.039434] env[63379]: DEBUG nova.virt.hardware [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1833.040776] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c0c3ca3-81dd-405a-ad2d-33b64abfbea6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.048435] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb4ce1ce-cfa4-410a-a099-1a96c9e91279 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.228076] env[63379]: DEBUG oslo_concurrency.lockutils [None req-65251b24-3584-46e0-93fb-8074285413b6 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.232s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1833.321447] env[63379]: DEBUG oslo_vmware.api [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780142, 'name': CloneVM_Task} progress is 95%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.821886] env[63379]: DEBUG oslo_vmware.api [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780142, 'name': CloneVM_Task} progress is 95%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.921634] env[63379]: INFO nova.scheduler.client.report [None req-65251b24-3584-46e0-93fb-8074285413b6 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Deleted allocation for migration 3b490a78-18fd-4882-b501-f1ff04f2cb79 [ 1833.963739] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager.update_available_resource {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1834.323156] env[63379]: DEBUG oslo_vmware.api [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780142, 'name': CloneVM_Task} progress is 95%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.426688] env[63379]: DEBUG oslo_concurrency.lockutils [None req-65251b24-3584-46e0-93fb-8074285413b6 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "9faef8ba-2263-4af8-ba5b-13a17b4275b6" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 8.990s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1834.427900] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "9faef8ba-2263-4af8-ba5b-13a17b4275b6" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 5.735s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1834.430210] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1ca20f3-a267-48cb-9889-513f5002d38c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.466491] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1834.466739] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1834.466907] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1834.467106] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63379) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1834.467977] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-411a595e-6d8b-438c-92e9-4e89215975c5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.475945] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8031b44-ba0f-44df-a9a0-de5608aa2604 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.490114] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dd4e8d3-d264-4877-b25f-534c650b221b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.496670] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-5a5cf81b-f9c4-48bd-9f03-2fddec00ebc3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.526484] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179900MB free_disk=163GB free_vcpus=48 pci_devices=None {{(pid=63379) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1834.526640] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1834.526820] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1834.823579] env[63379]: DEBUG oslo_vmware.api [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780142, 'name': CloneVM_Task} progress is 95%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.939460] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "9faef8ba-2263-4af8-ba5b-13a17b4275b6" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.511s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1835.036577] env[63379]: DEBUG oslo_concurrency.lockutils [None req-de2f4a12-526d-46ec-9727-7397c3c6f9ef tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "9faef8ba-2263-4af8-ba5b-13a17b4275b6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1835.036815] env[63379]: DEBUG oslo_concurrency.lockutils [None req-de2f4a12-526d-46ec-9727-7397c3c6f9ef tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "9faef8ba-2263-4af8-ba5b-13a17b4275b6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1835.037106] env[63379]: DEBUG oslo_concurrency.lockutils [None req-de2f4a12-526d-46ec-9727-7397c3c6f9ef tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "9faef8ba-2263-4af8-ba5b-13a17b4275b6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1835.037430] env[63379]: DEBUG oslo_concurrency.lockutils [None req-de2f4a12-526d-46ec-9727-7397c3c6f9ef tempest-ServerDiskConfigTestJSON-1340181381 
tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "9faef8ba-2263-4af8-ba5b-13a17b4275b6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1835.037601] env[63379]: DEBUG oslo_concurrency.lockutils [None req-de2f4a12-526d-46ec-9727-7397c3c6f9ef tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "9faef8ba-2263-4af8-ba5b-13a17b4275b6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1835.039675] env[63379]: INFO nova.compute.manager [None req-de2f4a12-526d-46ec-9727-7397c3c6f9ef tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Terminating instance [ 1835.041407] env[63379]: DEBUG nova.compute.manager [None req-de2f4a12-526d-46ec-9727-7397c3c6f9ef tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1835.041713] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-de2f4a12-526d-46ec-9727-7397c3c6f9ef tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1835.042560] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39f4fa11-5257-46d2-94c0-9ab37068fb49 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.051385] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-de2f4a12-526d-46ec-9727-7397c3c6f9ef tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1835.054423] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-71b8b916-82be-4b2a-a702-7215e9670194 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.061758] env[63379]: DEBUG oslo_vmware.api [None req-de2f4a12-526d-46ec-9727-7397c3c6f9ef tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1835.061758] env[63379]: value = "task-1780143" [ 1835.061758] env[63379]: _type = "Task" [ 1835.061758] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1835.070656] env[63379]: DEBUG oslo_vmware.api [None req-de2f4a12-526d-46ec-9727-7397c3c6f9ef tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780143, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.325025] env[63379]: DEBUG oslo_vmware.api [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780142, 'name': CloneVM_Task} progress is 95%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.502408] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "5e7a8635-8345-41c3-b485-a89773f37c5e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1835.502682] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "5e7a8635-8345-41c3-b485-a89773f37c5e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1835.557692] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bbe7bf11-3789-462c-aa41-749e03a8e391 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Acquiring lock "1d76a28f-822d-4b4f-be2f-2ad3371b3979" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1835.557986] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bbe7bf11-3789-462c-aa41-749e03a8e391 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lock "1d76a28f-822d-4b4f-be2f-2ad3371b3979" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1835.558290] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bbe7bf11-3789-462c-aa41-749e03a8e391 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Acquiring lock "1d76a28f-822d-4b4f-be2f-2ad3371b3979-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1835.558498] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bbe7bf11-3789-462c-aa41-749e03a8e391 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lock "1d76a28f-822d-4b4f-be2f-2ad3371b3979-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1835.558676] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bbe7bf11-3789-462c-aa41-749e03a8e391 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lock "1d76a28f-822d-4b4f-be2f-2ad3371b3979-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1835.560902] env[63379]: INFO nova.compute.manager [None req-bbe7bf11-3789-462c-aa41-749e03a8e391 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Terminating instance [ 1835.565639] env[63379]: DEBUG nova.compute.manager [None req-bbe7bf11-3789-462c-aa41-749e03a8e391 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1835.565842] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-bbe7bf11-3789-462c-aa41-749e03a8e391 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1835.566880] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 90f0c97d-695b-4975-8ab9-4e77a9175da1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.567036] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 1d76a28f-822d-4b4f-be2f-2ad3371b3979 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.567192] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 510db409-0b4c-494a-8084-39ef3cd6c918 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.567329] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance a7cce485-7476-4ea1-b127-68d879e164cd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.567450] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 19941838-d6b0-4fb8-9d06-f4a1b80ba428 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.567564] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 4b419aa8-d4da-45fd-a6da-6f05ee851f2f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.567679] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 8078bac6-146a-4e3a-a7a7-7093f617a330 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.567799] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 9faef8ba-2263-4af8-ba5b-13a17b4275b6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.567929] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 2be6bdea-416e-4912-8930-3c4e4f194f99 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.568037] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 815d0af5-e9a8-4475-9414-42715ea32d6a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.568178] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 88dae632-b363-4187-9198-e4300783d420 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1835.569821] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ddbe8b3-e6bc-4867-8e4b-d6824ade9274 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.578255] env[63379]: DEBUG oslo_vmware.api [None req-de2f4a12-526d-46ec-9727-7397c3c6f9ef tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780143, 'name': PowerOffVM_Task, 'duration_secs': 0.247104} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1835.580287] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-de2f4a12-526d-46ec-9727-7397c3c6f9ef tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1835.580476] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-de2f4a12-526d-46ec-9727-7397c3c6f9ef tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1835.580752] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbe7bf11-3789-462c-aa41-749e03a8e391 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1835.581502] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-86f4a2c2-b519-4330-8c6a-d98729195f28 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.582991] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2003479c-3731-4293-8076-cc5ae41baad8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.589305] env[63379]: DEBUG oslo_vmware.api [None req-bbe7bf11-3789-462c-aa41-749e03a8e391 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Waiting for the task: (returnval){ [ 1835.589305] env[63379]: value = "task-1780145" [ 1835.589305] env[63379]: _type = "Task" [ 1835.589305] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1835.596734] env[63379]: DEBUG oslo_vmware.api [None req-bbe7bf11-3789-462c-aa41-749e03a8e391 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1780145, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.826294] env[63379]: DEBUG oslo_vmware.api [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780142, 'name': CloneVM_Task} progress is 95%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.005599] env[63379]: DEBUG nova.compute.manager [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Starting instance... 
{{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1836.074248] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 5e7a8635-8345-41c3-b485-a89773f37c5e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1836.074512] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Total usable vcpus: 48, total allocated vcpus: 11 {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1836.074662] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2688MB phys_disk=200GB used_disk=11GB total_vcpus=48 used_vcpus=11 pci_stats=[] {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1836.101348] env[63379]: DEBUG oslo_vmware.api [None req-bbe7bf11-3789-462c-aa41-749e03a8e391 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1780145, 'name': PowerOffVM_Task, 'duration_secs': 0.169535} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1836.101786] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbe7bf11-3789-462c-aa41-749e03a8e391 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1836.101786] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-bbe7bf11-3789-462c-aa41-749e03a8e391 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1836.102048] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a83bf28b-1220-4936-b28d-316043108031 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.225442] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b20938a-a235-4770-b0ab-8bbae1e93884 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.235026] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09c4b9dd-34db-47f1-be4b-39039aa80c3e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.264345] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-085b195e-e0ee-4c1c-9aec-25529d14a1fd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.271780] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-e0ff932e-92b2-4663-9997-2456f1ce0272 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.285436] env[63379]: DEBUG nova.compute.provider_tree [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1836.326019] env[63379]: DEBUG oslo_vmware.api [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780142, 'name': CloneVM_Task} progress is 95%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.525324] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1836.788912] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1836.828643] env[63379]: DEBUG oslo_vmware.api [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780142, 'name': CloneVM_Task} progress is 95%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.295038] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63379) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1837.295038] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.768s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1837.295038] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.770s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1837.296602] env[63379]: INFO nova.compute.claims [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1837.329059] env[63379]: DEBUG oslo_vmware.api [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780142, 'name': CloneVM_Task} progress is 95%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.828428] env[63379]: DEBUG oslo_vmware.api [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780142, 'name': CloneVM_Task} progress is 95%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.959883] env[63379]: DEBUG oslo_concurrency.lockutils [None req-85d960cf-9042-4ef4-aedb-2f1c4bbf24f9 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "interface-8078bac6-146a-4e3a-a7a7-7093f617a330-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1837.960173] env[63379]: DEBUG oslo_concurrency.lockutils [None req-85d960cf-9042-4ef4-aedb-2f1c4bbf24f9 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "interface-8078bac6-146a-4e3a-a7a7-7093f617a330-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1837.960518] env[63379]: DEBUG nova.objects.instance [None req-85d960cf-9042-4ef4-aedb-2f1c4bbf24f9 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lazy-loading 'flavor' on Instance uuid 8078bac6-146a-4e3a-a7a7-7093f617a330 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1838.295076] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1838.295354] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1838.295514] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Starting heal instance info cache {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9974}} [ 1838.331620] env[63379]: DEBUG oslo_vmware.api [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780142, 'name': CloneVM_Task} progress is 95%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.433630] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69f33df1-8ec6-42ad-9d41-6ffb24e18cec {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.441694] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edfad4a4-9a44-4683-9c8c-27b91a01e484 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.473243] env[63379]: DEBUG nova.objects.instance [None req-85d960cf-9042-4ef4-aedb-2f1c4bbf24f9 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lazy-loading 'pci_requests' on Instance uuid 8078bac6-146a-4e3a-a7a7-7093f617a330 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1838.476092] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b5b5f4e-77ab-43d3-b5da-0832cf43cc3b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.484153] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-955de6ee-2779-462d-9390-da5ba9e0e7fd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.497982] env[63379]: DEBUG nova.compute.provider_tree [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1838.724637] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-de2f4a12-526d-46ec-9727-7397c3c6f9ef tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1838.725016] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-de2f4a12-526d-46ec-9727-7397c3c6f9ef tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1838.725313] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-de2f4a12-526d-46ec-9727-7397c3c6f9ef tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Deleting the datastore file [datastore1] 9faef8ba-2263-4af8-ba5b-13a17b4275b6 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1838.725670] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-260600f6-02ac-4635-84c0-ec8c7f5ff081 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.733490] env[63379]: DEBUG oslo_vmware.api [None req-de2f4a12-526d-46ec-9727-7397c3c6f9ef tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for 
the task: (returnval){ [ 1838.733490] env[63379]: value = "task-1780147" [ 1838.733490] env[63379]: _type = "Task" [ 1838.733490] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1838.734328] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-bbe7bf11-3789-462c-aa41-749e03a8e391 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1838.734558] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-bbe7bf11-3789-462c-aa41-749e03a8e391 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1838.734746] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbe7bf11-3789-462c-aa41-749e03a8e391 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Deleting the datastore file [datastore1] 1d76a28f-822d-4b4f-be2f-2ad3371b3979 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1838.738560] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8e6e83c4-d758-4afa-af94-3308d93d67ce {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.745484] env[63379]: DEBUG oslo_vmware.api [None req-de2f4a12-526d-46ec-9727-7397c3c6f9ef tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780147, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.746827] env[63379]: DEBUG oslo_vmware.api [None req-bbe7bf11-3789-462c-aa41-749e03a8e391 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Waiting for the task: (returnval){ [ 1838.746827] env[63379]: value = "task-1780148" [ 1838.746827] env[63379]: _type = "Task" [ 1838.746827] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1838.756803] env[63379]: DEBUG oslo_vmware.api [None req-bbe7bf11-3789-462c-aa41-749e03a8e391 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1780148, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.830988] env[63379]: DEBUG oslo_vmware.api [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780142, 'name': CloneVM_Task} progress is 100%. 
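The DeleteDatastoreFile_Task entries above follow oslo.vmware's usual invoke-then-poll pattern: a *_Task method is invoked through the session, and wait_for_task() blocks while emitting the "progress is N%" lines. A rough sketch of that pattern, assuming "session" is an oslo_vmware VMwareAPISession and ds_path/dc_ref are already known; invoke_api and wait_for_task are real oslo.vmware calls, the helper itself is illustrative.

def delete_datastore_file(session, ds_path, dc_ref):
    # FileManager lives on the vim service content; DeleteDatastoreFile_Task
    # returns a task moref such as task-1780147/task-1780148 above.
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager,
                              name=ds_path,        # e.g. '[datastore1] <instance uuid>'
                              datacenter=dc_ref)
    # Polls the task (the 'progress is 0%' ... 'completed successfully'
    # lines) and raises if it finishes in an error state.
    return session.wait_for_task(task)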
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.979604] env[63379]: DEBUG nova.objects.base [None req-85d960cf-9042-4ef4-aedb-2f1c4bbf24f9 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Object Instance<8078bac6-146a-4e3a-a7a7-7093f617a330> lazy-loaded attributes: flavor,pci_requests {{(pid=63379) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1838.979799] env[63379]: DEBUG nova.network.neutron [None req-85d960cf-9042-4ef4-aedb-2f1c4bbf24f9 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1839.001099] env[63379]: DEBUG nova.scheduler.client.report [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1839.053266] env[63379]: DEBUG oslo_concurrency.lockutils [None req-85d960cf-9042-4ef4-aedb-2f1c4bbf24f9 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "interface-8078bac6-146a-4e3a-a7a7-7093f617a330-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.093s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1839.244611] env[63379]: DEBUG oslo_vmware.api [None req-de2f4a12-526d-46ec-9727-7397c3c6f9ef tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780147, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151188} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1839.244856] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-de2f4a12-526d-46ec-9727-7397c3c6f9ef tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1839.245064] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-de2f4a12-526d-46ec-9727-7397c3c6f9ef tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1839.245268] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-de2f4a12-526d-46ec-9727-7397c3c6f9ef tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1839.245417] env[63379]: INFO nova.compute.manager [None req-de2f4a12-526d-46ec-9727-7397c3c6f9ef tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Took 4.20 seconds to destroy the instance on the hypervisor. [ 1839.245771] env[63379]: DEBUG oslo.service.loopingcall [None req-de2f4a12-526d-46ec-9727-7397c3c6f9ef tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1839.245933] env[63379]: DEBUG nova.compute.manager [-] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1839.246088] env[63379]: DEBUG nova.network.neutron [-] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1839.255854] env[63379]: DEBUG oslo_vmware.api [None req-bbe7bf11-3789-462c-aa41-749e03a8e391 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1780148, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156933} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1839.256118] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbe7bf11-3789-462c-aa41-749e03a8e391 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1839.256311] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-bbe7bf11-3789-462c-aa41-749e03a8e391 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1839.256493] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-bbe7bf11-3789-462c-aa41-749e03a8e391 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1839.256675] env[63379]: INFO nova.compute.manager [None req-bbe7bf11-3789-462c-aa41-749e03a8e391 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Took 3.69 seconds to destroy the instance on the hypervisor. [ 1839.256910] env[63379]: DEBUG oslo.service.loopingcall [None req-bbe7bf11-3789-462c-aa41-749e03a8e391 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1839.257179] env[63379]: DEBUG nova.compute.manager [-] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1839.257297] env[63379]: DEBUG nova.network.neutron [-] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1839.330352] env[63379]: DEBUG oslo_vmware.api [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780142, 'name': CloneVM_Task, 'duration_secs': 9.545059} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1839.331053] env[63379]: INFO nova.virt.vmwareapi.vmops [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Created linked-clone VM from snapshot [ 1839.331978] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1129231-685c-4ac3-b764-8884f5327aa9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.341230] env[63379]: DEBUG nova.virt.vmwareapi.images [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Uploading image b70147f5-2309-47ec-8e13-4ad453606361 {{(pid=63379) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1839.362774] env[63379]: DEBUG oslo_vmware.rw_handles [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1839.362774] env[63379]: value = "vm-369481" [ 1839.362774] env[63379]: _type = "VirtualMachine" [ 1839.362774] env[63379]: }. {{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1839.363076] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-0dca2f74-9377-453d-9d8a-3ead5065b1cd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.370148] env[63379]: DEBUG oslo_vmware.rw_handles [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lease: (returnval){ [ 1839.370148] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52441679-551b-88b4-0dc0-24a9d1f84531" [ 1839.370148] env[63379]: _type = "HttpNfcLease" [ 1839.370148] env[63379]: } obtained for exporting VM: (result){ [ 1839.370148] env[63379]: value = "vm-369481" [ 1839.370148] env[63379]: _type = "VirtualMachine" [ 1839.370148] env[63379]: }. {{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1839.370401] env[63379]: DEBUG oslo_vmware.api [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the lease: (returnval){ [ 1839.370401] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52441679-551b-88b4-0dc0-24a9d1f84531" [ 1839.370401] env[63379]: _type = "HttpNfcLease" [ 1839.370401] env[63379]: } to be ready. {{(pid=63379) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1839.376728] env[63379]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1839.376728] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52441679-551b-88b4-0dc0-24a9d1f84531" [ 1839.376728] env[63379]: _type = "HttpNfcLease" [ 1839.376728] env[63379]: } is initializing. 
{{(pid=63379) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1839.506373] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.211s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1839.506925] env[63379]: DEBUG nova.compute.manager [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1839.523375] env[63379]: DEBUG nova.compute.manager [req-c36491f9-749d-48ff-84f3-67185fbec32a req-11bb620a-0b62-469a-ab39-1be61fa4936a service nova] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Received event network-vif-deleted-414f9be4-c922-4750-817c-32bc1d4ac6c4 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1839.523642] env[63379]: INFO nova.compute.manager [req-c36491f9-749d-48ff-84f3-67185fbec32a req-11bb620a-0b62-469a-ab39-1be61fa4936a service nova] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Neutron deleted interface 414f9be4-c922-4750-817c-32bc1d4ac6c4; detaching it from the instance and deleting it from the info cache [ 1839.523819] env[63379]: DEBUG nova.network.neutron [req-c36491f9-749d-48ff-84f3-67185fbec32a req-11bb620a-0b62-469a-ab39-1be61fa4936a service nova] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1839.685996] env[63379]: DEBUG nova.compute.manager [req-5cd41c4a-8ee2-4348-9dac-55e66bc28aed req-30d24b70-dc44-48a4-89a7-af73dff942de service nova] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Received event network-vif-deleted-2ac41cb5-759a-42a6-a664-26ad0cc81d81 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1839.686274] env[63379]: INFO nova.compute.manager [req-5cd41c4a-8ee2-4348-9dac-55e66bc28aed req-30d24b70-dc44-48a4-89a7-af73dff942de service nova] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Neutron deleted interface 2ac41cb5-759a-42a6-a664-26ad0cc81d81; detaching it from the instance and deleting it from the info cache [ 1839.686501] env[63379]: DEBUG nova.network.neutron [req-5cd41c4a-8ee2-4348-9dac-55e66bc28aed req-30d24b70-dc44-48a4-89a7-af73dff942de service nova] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1839.803399] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Skipping network cache update for instance because it is being deleted. 
{{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10023}} [ 1839.878998] env[63379]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1839.878998] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52441679-551b-88b4-0dc0-24a9d1f84531" [ 1839.878998] env[63379]: _type = "HttpNfcLease" [ 1839.878998] env[63379]: } is ready. {{(pid=63379) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1839.879334] env[63379]: DEBUG oslo_vmware.rw_handles [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1839.879334] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52441679-551b-88b4-0dc0-24a9d1f84531" [ 1839.879334] env[63379]: _type = "HttpNfcLease" [ 1839.879334] env[63379]: }. {{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1839.880307] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fa9ba05-b935-45da-8357-4d1e7be4c856 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.889361] env[63379]: DEBUG oslo_vmware.rw_handles [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a2df53-1819-1451-6779-ed22832264e2/disk-0.vmdk from lease info. {{(pid=63379) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1839.889691] env[63379]: DEBUG oslo_vmware.rw_handles [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a2df53-1819-1451-6779-ed22832264e2/disk-0.vmdk for reading. 
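The lease lines above are the VM-export half of the image upload: ExportVm hands back an HttpNfcLease, the session polls it until it is ready, and the VMDK URL is read out of the lease info. A condensed sketch of that flow, assuming "session" is an oslo_vmware VMwareAPISession and "vm_ref" is the cloned VM's moref; the actual streaming/upload step is omitted.

from oslo_vmware import vim_util


def get_export_vmdk_url(session, vm_ref):
    # VirtualMachine.ExportVm returns an HttpNfcLease moref.
    lease = session.invoke_api(session.vim, 'ExportVm', vm_ref)
    # Blocks while the lease is 'initializing', as in the polling above.
    session.wait_for_lease_ready(lease)
    lease_info = session.invoke_api(vim_util, 'get_object_property',
                                    session.vim, lease, 'info')
    # The lease advertises one deviceUrl per exported device; pick the disk.
    for device_url in lease_info.deviceUrl:
        if device_url.disk:
            return device_url.url      # the .../disk-0.vmdk URL seen above
    raise RuntimeError('export lease exposed no disk URL')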
{{(pid=63379) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1839.952303] env[63379]: DEBUG nova.network.neutron [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Successfully updated port: 083ff06b-fbdc-4b0f-9c47-6fce99aa11ac {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1839.983081] env[63379]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-926e27ca-2c95-4eb8-b835-8cccad840df5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.000729] env[63379]: DEBUG nova.network.neutron [-] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1840.013074] env[63379]: DEBUG nova.compute.utils [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1840.014932] env[63379]: DEBUG nova.compute.manager [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1840.015116] env[63379]: DEBUG nova.network.neutron [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1840.026019] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3c7e1ab1-4b2c-4fb3-8535-20b79b6eff87 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.035425] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4104f3ab-0fad-4aaf-bd56-9ce6affe211a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.056061] env[63379]: DEBUG nova.policy [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1fd786092d394d1a9b444051664ac7ae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0f28f4532d464e6eb90ab75799990c85', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1840.070650] env[63379]: DEBUG nova.compute.manager [req-c36491f9-749d-48ff-84f3-67185fbec32a req-11bb620a-0b62-469a-ab39-1be61fa4936a service nova] [instance: 
9faef8ba-2263-4af8-ba5b-13a17b4275b6] Detach interface failed, port_id=414f9be4-c922-4750-817c-32bc1d4ac6c4, reason: Instance 9faef8ba-2263-4af8-ba5b-13a17b4275b6 could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 1840.166428] env[63379]: DEBUG nova.network.neutron [-] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1840.188933] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-179a2d5b-9984-4a14-8a5e-65497483b204 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.199485] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d2f1890-4b40-40b1-bb34-60678582b7c2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.229451] env[63379]: DEBUG nova.compute.manager [req-5cd41c4a-8ee2-4348-9dac-55e66bc28aed req-30d24b70-dc44-48a4-89a7-af73dff942de service nova] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Detach interface failed, port_id=2ac41cb5-759a-42a6-a664-26ad0cc81d81, reason: Instance 1d76a28f-822d-4b4f-be2f-2ad3371b3979 could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 1840.350577] env[63379]: DEBUG nova.network.neutron [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Successfully created port: 1aac08ab-b9d0-4a05-b102-37187767d4ef {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1840.455163] env[63379]: DEBUG oslo_concurrency.lockutils [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "refresh_cache-88dae632-b363-4187-9198-e4300783d420" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1840.455372] env[63379]: DEBUG oslo_concurrency.lockutils [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquired lock "refresh_cache-88dae632-b363-4187-9198-e4300783d420" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1840.455542] env[63379]: DEBUG nova.network.neutron [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1840.505425] env[63379]: INFO nova.compute.manager [-] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Took 1.26 seconds to deallocate network for instance. [ 1840.518088] env[63379]: DEBUG nova.compute.manager [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Start building block device mappings for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1840.670578] env[63379]: INFO nova.compute.manager [-] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Took 1.41 seconds to deallocate network for instance. [ 1840.837787] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "refresh_cache-510db409-0b4c-494a-8084-39ef3cd6c918" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1840.838210] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquired lock "refresh_cache-510db409-0b4c-494a-8084-39ef3cd6c918" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1840.838411] env[63379]: DEBUG nova.network.neutron [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Forcefully refreshing network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1840.988995] env[63379]: DEBUG nova.network.neutron [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1841.013340] env[63379]: DEBUG oslo_concurrency.lockutils [None req-de2f4a12-526d-46ec-9727-7397c3c6f9ef tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1841.013576] env[63379]: DEBUG oslo_concurrency.lockutils [None req-de2f4a12-526d-46ec-9727-7397c3c6f9ef tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1841.013804] env[63379]: DEBUG nova.objects.instance [None req-de2f4a12-526d-46ec-9727-7397c3c6f9ef tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lazy-loading 'resources' on Instance uuid 9faef8ba-2263-4af8-ba5b-13a17b4275b6 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1841.017789] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c437adcd-13c9-4fcc-a73b-32652d1d3607 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "interface-8078bac6-146a-4e3a-a7a7-7093f617a330-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1841.018135] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c437adcd-13c9-4fcc-a73b-32652d1d3607 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "interface-8078bac6-146a-4e3a-a7a7-7093f617a330-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=63379) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1841.018509] env[63379]: DEBUG nova.objects.instance [None req-c437adcd-13c9-4fcc-a73b-32652d1d3607 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lazy-loading 'flavor' on Instance uuid 8078bac6-146a-4e3a-a7a7-7093f617a330 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1841.134526] env[63379]: DEBUG nova.network.neutron [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Updating instance_info_cache with network_info: [{"id": "083ff06b-fbdc-4b0f-9c47-6fce99aa11ac", "address": "fa:16:3e:b4:7f:07", "network": {"id": "f43cdd88-dc3a-4cc6-af5d-da244f472d78", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-715557899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "928a9d102f0e45b897eae72fa566c0fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23fc30ea-1f06-424d-86e1-27ae5435b1a9", "external-id": "nsx-vlan-transportzone-189", "segmentation_id": 189, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap083ff06b-fb", "ovs_interfaceid": "083ff06b-fbdc-4b0f-9c47-6fce99aa11ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1841.177245] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bbe7bf11-3789-462c-aa41-749e03a8e391 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1841.532539] env[63379]: DEBUG nova.compute.manager [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Start spawning the instance on the hypervisor. 
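Most of the "Acquiring lock ... / acquired ... / released ..." triplets in this section (compute_resources, the per-instance interface-<uuid> locks, refresh_cache-<uuid>) come from oslo.concurrency's lockutils. A minimal sketch of both forms of that pattern; the lock names mirror the log, the function bodies are placeholders.

from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def update_usage():
    # Entering and leaving the decorated function is what produces the
    # acquired/waited and released/held DEBUG lines for this lock name.
    pass


def do_attach_interface(instance_uuid):
    # Same mechanism with an explicit context manager and a lock name
    # built at runtime, like the interface-<uuid>-None lock above.
    with lockutils.lock('interface-%s-None' % instance_uuid):
        pass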
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1841.558516] env[63379]: DEBUG nova.virt.hardware [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1841.558886] env[63379]: DEBUG nova.virt.hardware [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1841.559188] env[63379]: DEBUG nova.virt.hardware [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1841.559428] env[63379]: DEBUG nova.virt.hardware [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1841.559615] env[63379]: DEBUG nova.virt.hardware [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1841.559829] env[63379]: DEBUG nova.virt.hardware [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1841.560158] env[63379]: DEBUG nova.virt.hardware [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1841.560368] env[63379]: DEBUG nova.virt.hardware [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1841.560675] 
env[63379]: DEBUG nova.virt.hardware [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1841.560945] env[63379]: DEBUG nova.virt.hardware [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1841.561380] env[63379]: DEBUG nova.virt.hardware [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1841.562169] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3915ca19-2bef-4db3-9a48-f2ba9694ccd7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.572843] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83af1a66-c755-4f2c-aa8f-79bc4404ffcd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.630235] env[63379]: DEBUG nova.compute.manager [req-762833dc-f23f-4914-bec9-798f0b517f5a req-9862c306-aa4e-4455-9d32-53bce917cb03 service nova] [instance: 88dae632-b363-4187-9198-e4300783d420] Received event network-vif-plugged-083ff06b-fbdc-4b0f-9c47-6fce99aa11ac {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1841.630235] env[63379]: DEBUG oslo_concurrency.lockutils [req-762833dc-f23f-4914-bec9-798f0b517f5a req-9862c306-aa4e-4455-9d32-53bce917cb03 service nova] Acquiring lock "88dae632-b363-4187-9198-e4300783d420-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1841.630580] env[63379]: DEBUG oslo_concurrency.lockutils [req-762833dc-f23f-4914-bec9-798f0b517f5a req-9862c306-aa4e-4455-9d32-53bce917cb03 service nova] Lock "88dae632-b363-4187-9198-e4300783d420-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1841.630774] env[63379]: DEBUG oslo_concurrency.lockutils [req-762833dc-f23f-4914-bec9-798f0b517f5a req-9862c306-aa4e-4455-9d32-53bce917cb03 service nova] Lock "88dae632-b363-4187-9198-e4300783d420-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1841.630917] env[63379]: DEBUG nova.compute.manager [req-762833dc-f23f-4914-bec9-798f0b517f5a req-9862c306-aa4e-4455-9d32-53bce917cb03 service nova] [instance: 88dae632-b363-4187-9198-e4300783d420] No waiting events found dispatching network-vif-plugged-083ff06b-fbdc-4b0f-9c47-6fce99aa11ac {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1841.631114] env[63379]: WARNING 
nova.compute.manager [req-762833dc-f23f-4914-bec9-798f0b517f5a req-9862c306-aa4e-4455-9d32-53bce917cb03 service nova] [instance: 88dae632-b363-4187-9198-e4300783d420] Received unexpected event network-vif-plugged-083ff06b-fbdc-4b0f-9c47-6fce99aa11ac for instance with vm_state building and task_state spawning. [ 1841.631476] env[63379]: DEBUG nova.compute.manager [req-762833dc-f23f-4914-bec9-798f0b517f5a req-9862c306-aa4e-4455-9d32-53bce917cb03 service nova] [instance: 88dae632-b363-4187-9198-e4300783d420] Received event network-changed-083ff06b-fbdc-4b0f-9c47-6fce99aa11ac {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1841.631758] env[63379]: DEBUG nova.compute.manager [req-762833dc-f23f-4914-bec9-798f0b517f5a req-9862c306-aa4e-4455-9d32-53bce917cb03 service nova] [instance: 88dae632-b363-4187-9198-e4300783d420] Refreshing instance network info cache due to event network-changed-083ff06b-fbdc-4b0f-9c47-6fce99aa11ac. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1841.632088] env[63379]: DEBUG oslo_concurrency.lockutils [req-762833dc-f23f-4914-bec9-798f0b517f5a req-9862c306-aa4e-4455-9d32-53bce917cb03 service nova] Acquiring lock "refresh_cache-88dae632-b363-4187-9198-e4300783d420" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1841.637768] env[63379]: DEBUG oslo_concurrency.lockutils [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Releasing lock "refresh_cache-88dae632-b363-4187-9198-e4300783d420" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1841.637768] env[63379]: DEBUG nova.compute.manager [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Instance network_info: |[{"id": "083ff06b-fbdc-4b0f-9c47-6fce99aa11ac", "address": "fa:16:3e:b4:7f:07", "network": {"id": "f43cdd88-dc3a-4cc6-af5d-da244f472d78", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-715557899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "928a9d102f0e45b897eae72fa566c0fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23fc30ea-1f06-424d-86e1-27ae5435b1a9", "external-id": "nsx-vlan-transportzone-189", "segmentation_id": 189, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap083ff06b-fb", "ovs_interfaceid": "083ff06b-fbdc-4b0f-9c47-6fce99aa11ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1841.638156] env[63379]: DEBUG oslo_concurrency.lockutils [req-762833dc-f23f-4914-bec9-798f0b517f5a req-9862c306-aa4e-4455-9d32-53bce917cb03 service nova] Acquired lock "refresh_cache-88dae632-b363-4187-9198-e4300783d420" {{(pid=63379) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1841.638395] env[63379]: DEBUG nova.network.neutron [req-762833dc-f23f-4914-bec9-798f0b517f5a req-9862c306-aa4e-4455-9d32-53bce917cb03 service nova] [instance: 88dae632-b363-4187-9198-e4300783d420] Refreshing network info cache for port 083ff06b-fbdc-4b0f-9c47-6fce99aa11ac {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1841.641378] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b4:7f:07', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '23fc30ea-1f06-424d-86e1-27ae5435b1a9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '083ff06b-fbdc-4b0f-9c47-6fce99aa11ac', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1841.654389] env[63379]: DEBUG oslo.service.loopingcall [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1841.655773] env[63379]: DEBUG nova.objects.instance [None req-c437adcd-13c9-4fcc-a73b-32652d1d3607 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lazy-loading 'pci_requests' on Instance uuid 8078bac6-146a-4e3a-a7a7-7093f617a330 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1841.662307] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 88dae632-b363-4187-9198-e4300783d420] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1841.665095] env[63379]: DEBUG nova.objects.base [None req-c437adcd-13c9-4fcc-a73b-32652d1d3607 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Object Instance<8078bac6-146a-4e3a-a7a7-7093f617a330> lazy-loaded attributes: flavor,pci_requests {{(pid=63379) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1841.665985] env[63379]: DEBUG nova.network.neutron [None req-c437adcd-13c9-4fcc-a73b-32652d1d3607 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1841.667383] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e241aef4-14d1-43de-9f57-256070719d33 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.700850] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1841.700850] env[63379]: value = "task-1780150" [ 1841.700850] env[63379]: _type = "Task" [ 1841.700850] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1841.712603] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780150, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.733042] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff985eab-3517-4564-bdda-994f79a364e2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.741053] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90c02fff-7052-49df-a1a7-70a6160ea899 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.746159] env[63379]: DEBUG nova.policy [None req-c437adcd-13c9-4fcc-a73b-32652d1d3607 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5cbf26808a73470898829b58491e7c6f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'edb0d4b37a67492f9e0275b341e80cc2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1841.777777] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a91633e3-6e60-4663-93d0-e18e3fa69902 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.785968] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ebed358-e0d4-4bba-9d15-5e881f2668f2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.800612] env[63379]: DEBUG nova.compute.provider_tree [None req-de2f4a12-526d-46ec-9727-7397c3c6f9ef tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1841.890854] env[63379]: DEBUG nova.compute.manager [req-60954bf3-32bb-45fe-8b6d-b028bc876d1a req-115d90f8-071b-472c-ac48-e484c330425c service nova] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Received event network-vif-plugged-1aac08ab-b9d0-4a05-b102-37187767d4ef {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1841.891185] env[63379]: DEBUG oslo_concurrency.lockutils [req-60954bf3-32bb-45fe-8b6d-b028bc876d1a req-115d90f8-071b-472c-ac48-e484c330425c service nova] Acquiring lock "5e7a8635-8345-41c3-b485-a89773f37c5e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1841.891478] env[63379]: DEBUG oslo_concurrency.lockutils [req-60954bf3-32bb-45fe-8b6d-b028bc876d1a req-115d90f8-071b-472c-ac48-e484c330425c service nova] Lock "5e7a8635-8345-41c3-b485-a89773f37c5e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1841.891726] env[63379]: DEBUG oslo_concurrency.lockutils 
[req-60954bf3-32bb-45fe-8b6d-b028bc876d1a req-115d90f8-071b-472c-ac48-e484c330425c service nova] Lock "5e7a8635-8345-41c3-b485-a89773f37c5e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1841.891970] env[63379]: DEBUG nova.compute.manager [req-60954bf3-32bb-45fe-8b6d-b028bc876d1a req-115d90f8-071b-472c-ac48-e484c330425c service nova] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] No waiting events found dispatching network-vif-plugged-1aac08ab-b9d0-4a05-b102-37187767d4ef {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1841.892252] env[63379]: WARNING nova.compute.manager [req-60954bf3-32bb-45fe-8b6d-b028bc876d1a req-115d90f8-071b-472c-ac48-e484c330425c service nova] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Received unexpected event network-vif-plugged-1aac08ab-b9d0-4a05-b102-37187767d4ef for instance with vm_state building and task_state spawning. [ 1842.013208] env[63379]: DEBUG nova.network.neutron [None req-c437adcd-13c9-4fcc-a73b-32652d1d3607 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Successfully created port: 1ede15ef-deb2-4892-b7bc-b98c45fd7fcb {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1842.213700] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780150, 'name': CreateVM_Task, 'duration_secs': 0.35826} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1842.213887] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 88dae632-b363-4187-9198-e4300783d420] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1842.214606] env[63379]: DEBUG oslo_concurrency.lockutils [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1842.214788] env[63379]: DEBUG oslo_concurrency.lockutils [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1842.215237] env[63379]: DEBUG oslo_concurrency.lockutils [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1842.215515] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-822fe210-e3f2-46a0-9f5f-a632a340d1c5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.220537] env[63379]: DEBUG oslo_vmware.api [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 
tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1842.220537] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52400f48-c54e-10dc-0aa3-60ae7c7b8bcc" [ 1842.220537] env[63379]: _type = "Task" [ 1842.220537] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1842.228795] env[63379]: DEBUG oslo_vmware.api [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52400f48-c54e-10dc-0aa3-60ae7c7b8bcc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1842.301656] env[63379]: DEBUG nova.network.neutron [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Updating instance_info_cache with network_info: [{"id": "6cdabd2b-f665-46a9-a86e-2527cfe452bf", "address": "fa:16:3e:bc:a5:55", "network": {"id": "867cf8d8-4bba-4306-ad6d-632c9dc6863d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-777715300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.247", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a3363a90de2d4d5988ddd03974c10d0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "459b8c74-0aa6-42b6-996a-42b1c5d7e5c6", "external-id": "nsx-vlan-transportzone-467", "segmentation_id": 467, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6cdabd2b-f6", "ovs_interfaceid": "6cdabd2b-f665-46a9-a86e-2527cfe452bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1842.303758] env[63379]: DEBUG nova.scheduler.client.report [None req-de2f4a12-526d-46ec-9727-7397c3c6f9ef tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1842.396187] env[63379]: DEBUG nova.network.neutron [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Successfully updated port: 1aac08ab-b9d0-4a05-b102-37187767d4ef {{(pid=63379) _update_port 
/opt/stack/nova/nova/network/neutron.py:586}} [ 1842.442721] env[63379]: DEBUG nova.network.neutron [req-762833dc-f23f-4914-bec9-798f0b517f5a req-9862c306-aa4e-4455-9d32-53bce917cb03 service nova] [instance: 88dae632-b363-4187-9198-e4300783d420] Updated VIF entry in instance network info cache for port 083ff06b-fbdc-4b0f-9c47-6fce99aa11ac. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1842.443263] env[63379]: DEBUG nova.network.neutron [req-762833dc-f23f-4914-bec9-798f0b517f5a req-9862c306-aa4e-4455-9d32-53bce917cb03 service nova] [instance: 88dae632-b363-4187-9198-e4300783d420] Updating instance_info_cache with network_info: [{"id": "083ff06b-fbdc-4b0f-9c47-6fce99aa11ac", "address": "fa:16:3e:b4:7f:07", "network": {"id": "f43cdd88-dc3a-4cc6-af5d-da244f472d78", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-715557899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "928a9d102f0e45b897eae72fa566c0fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23fc30ea-1f06-424d-86e1-27ae5435b1a9", "external-id": "nsx-vlan-transportzone-189", "segmentation_id": 189, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap083ff06b-fb", "ovs_interfaceid": "083ff06b-fbdc-4b0f-9c47-6fce99aa11ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1842.731309] env[63379]: DEBUG oslo_vmware.api [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52400f48-c54e-10dc-0aa3-60ae7c7b8bcc, 'name': SearchDatastore_Task, 'duration_secs': 0.012454} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1842.731706] env[63379]: DEBUG oslo_concurrency.lockutils [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1842.731865] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1842.732124] env[63379]: DEBUG oslo_concurrency.lockutils [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1842.732350] env[63379]: DEBUG oslo_concurrency.lockutils [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1842.732571] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1842.732958] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-34fd2b23-6412-492d-8fe6-1e7b82ed1d88 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.752694] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1842.752931] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1842.753768] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34b7dc58-1916-4791-b79f-3119ce88c497 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.759533] env[63379]: DEBUG oslo_vmware.api [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1842.759533] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]524d25f1-65de-0094-43c0-1ceba30e2cef" [ 1842.759533] env[63379]: _type = "Task" [ 1842.759533] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1842.768662] env[63379]: DEBUG oslo_vmware.api [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]524d25f1-65de-0094-43c0-1ceba30e2cef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1842.804607] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Releasing lock "refresh_cache-510db409-0b4c-494a-8084-39ef3cd6c918" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1842.804907] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Updated the network info_cache for instance {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10045}} [ 1842.805391] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1842.805431] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1842.805622] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1842.808661] env[63379]: DEBUG oslo_concurrency.lockutils [None req-de2f4a12-526d-46ec-9727-7397c3c6f9ef tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.795s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1842.811133] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bbe7bf11-3789-462c-aa41-749e03a8e391 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 
1.634s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1842.811384] env[63379]: DEBUG nova.objects.instance [None req-bbe7bf11-3789-462c-aa41-749e03a8e391 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lazy-loading 'resources' on Instance uuid 1d76a28f-822d-4b4f-be2f-2ad3371b3979 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1842.833148] env[63379]: INFO nova.scheduler.client.report [None req-de2f4a12-526d-46ec-9727-7397c3c6f9ef tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Deleted allocations for instance 9faef8ba-2263-4af8-ba5b-13a17b4275b6 [ 1842.898931] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "refresh_cache-5e7a8635-8345-41c3-b485-a89773f37c5e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1842.899142] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquired lock "refresh_cache-5e7a8635-8345-41c3-b485-a89773f37c5e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1842.899325] env[63379]: DEBUG nova.network.neutron [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1842.946750] env[63379]: DEBUG oslo_concurrency.lockutils [req-762833dc-f23f-4914-bec9-798f0b517f5a req-9862c306-aa4e-4455-9d32-53bce917cb03 service nova] Releasing lock "refresh_cache-88dae632-b363-4187-9198-e4300783d420" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1842.964629] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1843.270971] env[63379]: DEBUG oslo_vmware.api [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]524d25f1-65de-0094-43c0-1ceba30e2cef, 'name': SearchDatastore_Task, 'duration_secs': 0.009072} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1843.271892] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e14203f8-d83c-45b5-9782-9b0c88a2c38d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.278172] env[63379]: DEBUG oslo_vmware.api [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1843.278172] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d1325d-83e7-f05c-5d0a-618e0546379a" [ 1843.278172] env[63379]: _type = "Task" [ 1843.278172] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1843.285762] env[63379]: DEBUG oslo_vmware.api [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d1325d-83e7-f05c-5d0a-618e0546379a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.341639] env[63379]: DEBUG oslo_concurrency.lockutils [None req-de2f4a12-526d-46ec-9727-7397c3c6f9ef tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "9faef8ba-2263-4af8-ba5b-13a17b4275b6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.305s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1843.441649] env[63379]: DEBUG nova.network.neutron [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Instance cache missing network info. 
{{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1843.462131] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9c0d9ce-8330-4b5d-88e7-b65e342bf63a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.471686] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff450d63-1925-4c14-a3b2-0d21a57d6c6d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.506076] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6d15379-a9e2-4d5b-9963-0b05f7e9ca30 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.513905] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a844621f-516e-4340-b3ae-8b7c707e7f9f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.527329] env[63379]: DEBUG nova.compute.provider_tree [None req-bbe7bf11-3789-462c-aa41-749e03a8e391 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1843.735551] env[63379]: DEBUG nova.network.neutron [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Updating instance_info_cache with network_info: [{"id": "1aac08ab-b9d0-4a05-b102-37187767d4ef", "address": "fa:16:3e:1c:7d:60", "network": {"id": "a2c9b802-041e-4679-bfb1-118fd9cd10f3", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-986609966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f28f4532d464e6eb90ab75799990c85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1aac08ab-b9", "ovs_interfaceid": "1aac08ab-b9d0-4a05-b102-37187767d4ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1843.792334] env[63379]: DEBUG oslo_vmware.api [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d1325d-83e7-f05c-5d0a-618e0546379a, 'name': SearchDatastore_Task, 'duration_secs': 0.010998} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1843.792630] env[63379]: DEBUG oslo_concurrency.lockutils [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1843.792894] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 88dae632-b363-4187-9198-e4300783d420/88dae632-b363-4187-9198-e4300783d420.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1843.793176] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6b49247d-e299-49a9-a690-e2312761e605 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.803337] env[63379]: DEBUG oslo_vmware.api [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1843.803337] env[63379]: value = "task-1780151" [ 1843.803337] env[63379]: _type = "Task" [ 1843.803337] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1843.809149] env[63379]: DEBUG nova.compute.manager [req-f89a5842-54a3-4804-a708-720dfd8e1cef req-5f6c1c37-5a04-44be-b1df-0ca6ca6a622f service nova] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Received event network-vif-plugged-1ede15ef-deb2-4892-b7bc-b98c45fd7fcb {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1843.809669] env[63379]: DEBUG oslo_concurrency.lockutils [req-f89a5842-54a3-4804-a708-720dfd8e1cef req-5f6c1c37-5a04-44be-b1df-0ca6ca6a622f service nova] Acquiring lock "8078bac6-146a-4e3a-a7a7-7093f617a330-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1843.811085] env[63379]: DEBUG oslo_concurrency.lockutils [req-f89a5842-54a3-4804-a708-720dfd8e1cef req-5f6c1c37-5a04-44be-b1df-0ca6ca6a622f service nova] Lock "8078bac6-146a-4e3a-a7a7-7093f617a330-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1843.811330] env[63379]: DEBUG oslo_concurrency.lockutils [req-f89a5842-54a3-4804-a708-720dfd8e1cef req-5f6c1c37-5a04-44be-b1df-0ca6ca6a622f service nova] Lock "8078bac6-146a-4e3a-a7a7-7093f617a330-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1843.811566] env[63379]: DEBUG nova.compute.manager [req-f89a5842-54a3-4804-a708-720dfd8e1cef req-5f6c1c37-5a04-44be-b1df-0ca6ca6a622f service nova] [instance: 
8078bac6-146a-4e3a-a7a7-7093f617a330] No waiting events found dispatching network-vif-plugged-1ede15ef-deb2-4892-b7bc-b98c45fd7fcb {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1843.811753] env[63379]: WARNING nova.compute.manager [req-f89a5842-54a3-4804-a708-720dfd8e1cef req-5f6c1c37-5a04-44be-b1df-0ca6ca6a622f service nova] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Received unexpected event network-vif-plugged-1ede15ef-deb2-4892-b7bc-b98c45fd7fcb for instance with vm_state active and task_state None. [ 1843.818337] env[63379]: DEBUG oslo_vmware.api [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780151, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.828257] env[63379]: DEBUG nova.compute.manager [req-17475f4e-523a-4f94-b6ce-06e14424731d req-ca873652-0837-4dd2-87f0-0af2121bb675 service nova] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Received event network-changed-1aac08ab-b9d0-4a05-b102-37187767d4ef {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1843.828451] env[63379]: DEBUG nova.compute.manager [req-17475f4e-523a-4f94-b6ce-06e14424731d req-ca873652-0837-4dd2-87f0-0af2121bb675 service nova] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Refreshing instance network info cache due to event network-changed-1aac08ab-b9d0-4a05-b102-37187767d4ef. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1843.828638] env[63379]: DEBUG oslo_concurrency.lockutils [req-17475f4e-523a-4f94-b6ce-06e14424731d req-ca873652-0837-4dd2-87f0-0af2121bb675 service nova] Acquiring lock "refresh_cache-5e7a8635-8345-41c3-b485-a89773f37c5e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1843.843548] env[63379]: DEBUG nova.network.neutron [None req-c437adcd-13c9-4fcc-a73b-32652d1d3607 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Successfully updated port: 1ede15ef-deb2-4892-b7bc-b98c45fd7fcb {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1844.030851] env[63379]: DEBUG nova.scheduler.client.report [None req-bbe7bf11-3789-462c-aa41-749e03a8e391 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1844.240934] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Releasing lock "refresh_cache-5e7a8635-8345-41c3-b485-a89773f37c5e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1844.241309] env[63379]: DEBUG 
nova.compute.manager [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Instance network_info: |[{"id": "1aac08ab-b9d0-4a05-b102-37187767d4ef", "address": "fa:16:3e:1c:7d:60", "network": {"id": "a2c9b802-041e-4679-bfb1-118fd9cd10f3", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-986609966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f28f4532d464e6eb90ab75799990c85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1aac08ab-b9", "ovs_interfaceid": "1aac08ab-b9d0-4a05-b102-37187767d4ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1844.241736] env[63379]: DEBUG oslo_concurrency.lockutils [req-17475f4e-523a-4f94-b6ce-06e14424731d req-ca873652-0837-4dd2-87f0-0af2121bb675 service nova] Acquired lock "refresh_cache-5e7a8635-8345-41c3-b485-a89773f37c5e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1844.242052] env[63379]: DEBUG nova.network.neutron [req-17475f4e-523a-4f94-b6ce-06e14424731d req-ca873652-0837-4dd2-87f0-0af2121bb675 service nova] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Refreshing network info cache for port 1aac08ab-b9d0-4a05-b102-37187767d4ef {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1844.243673] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1c:7d:60', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8f441782-e89c-4815-b53e-af83c5d27902', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1aac08ab-b9d0-4a05-b102-37187767d4ef', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1844.254286] env[63379]: DEBUG oslo.service.loopingcall [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1844.257986] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1844.258681] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-65c1dfbb-4ea1-4b8a-b97e-763c1dedb81b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.286182] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1844.286182] env[63379]: value = "task-1780152" [ 1844.286182] env[63379]: _type = "Task" [ 1844.286182] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1844.295665] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780152, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.313978] env[63379]: DEBUG oslo_vmware.api [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780151, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.347431] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c437adcd-13c9-4fcc-a73b-32652d1d3607 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "refresh_cache-8078bac6-146a-4e3a-a7a7-7093f617a330" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1844.347761] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c437adcd-13c9-4fcc-a73b-32652d1d3607 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquired lock "refresh_cache-8078bac6-146a-4e3a-a7a7-7093f617a330" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1844.348135] env[63379]: DEBUG nova.network.neutron [None req-c437adcd-13c9-4fcc-a73b-32652d1d3607 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1844.520939] env[63379]: DEBUG nova.network.neutron [req-17475f4e-523a-4f94-b6ce-06e14424731d req-ca873652-0837-4dd2-87f0-0af2121bb675 service nova] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Updated VIF entry in instance network info cache for port 1aac08ab-b9d0-4a05-b102-37187767d4ef. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1844.521445] env[63379]: DEBUG nova.network.neutron [req-17475f4e-523a-4f94-b6ce-06e14424731d req-ca873652-0837-4dd2-87f0-0af2121bb675 service nova] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Updating instance_info_cache with network_info: [{"id": "1aac08ab-b9d0-4a05-b102-37187767d4ef", "address": "fa:16:3e:1c:7d:60", "network": {"id": "a2c9b802-041e-4679-bfb1-118fd9cd10f3", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-986609966-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f28f4532d464e6eb90ab75799990c85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8f441782-e89c-4815-b53e-af83c5d27902", "external-id": "nsx-vlan-transportzone-562", "segmentation_id": 562, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1aac08ab-b9", "ovs_interfaceid": "1aac08ab-b9d0-4a05-b102-37187767d4ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1844.536591] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bbe7bf11-3789-462c-aa41-749e03a8e391 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.725s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1844.564231] env[63379]: INFO nova.scheduler.client.report [None req-bbe7bf11-3789-462c-aa41-749e03a8e391 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Deleted allocations for instance 1d76a28f-822d-4b4f-be2f-2ad3371b3979 [ 1844.796485] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780152, 'name': CreateVM_Task, 'duration_secs': 0.433946} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1844.796828] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1844.797457] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1844.797726] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1844.798171] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1844.798530] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae7bf209-f65c-4b11-b592-dc3a60ffbf5e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.803471] env[63379]: DEBUG oslo_vmware.api [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1844.803471] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52ed9497-ba83-55a8-047b-709626f566a6" [ 1844.803471] env[63379]: _type = "Task" [ 1844.803471] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1844.813896] env[63379]: DEBUG oslo_vmware.api [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52ed9497-ba83-55a8-047b-709626f566a6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.816951] env[63379]: DEBUG oslo_vmware.api [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780151, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.828779} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1844.817233] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 88dae632-b363-4187-9198-e4300783d420/88dae632-b363-4187-9198-e4300783d420.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1844.817465] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1844.817722] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-35a7ff28-522a-4fc5-97b6-8d19ba8c27ba {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.824406] env[63379]: DEBUG oslo_vmware.api [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1844.824406] env[63379]: value = "task-1780153" [ 1844.824406] env[63379]: _type = "Task" [ 1844.824406] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1844.831639] env[63379]: DEBUG oslo_vmware.api [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780153, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.899320] env[63379]: WARNING nova.network.neutron [None req-c437adcd-13c9-4fcc-a73b-32652d1d3607 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] 501025fb-aee7-4f74-80fd-af4976529317 already exists in list: networks containing: ['501025fb-aee7-4f74-80fd-af4976529317']. 
ignoring it [ 1845.024837] env[63379]: DEBUG oslo_concurrency.lockutils [req-17475f4e-523a-4f94-b6ce-06e14424731d req-ca873652-0837-4dd2-87f0-0af2121bb675 service nova] Releasing lock "refresh_cache-5e7a8635-8345-41c3-b485-a89773f37c5e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1845.073654] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bbe7bf11-3789-462c-aa41-749e03a8e391 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lock "1d76a28f-822d-4b4f-be2f-2ad3371b3979" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.516s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1845.259857] env[63379]: DEBUG nova.network.neutron [None req-c437adcd-13c9-4fcc-a73b-32652d1d3607 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Updating instance_info_cache with network_info: [{"id": "c2313903-6e4e-42f8-be0f-3c00be1c0fec", "address": "fa:16:3e:bc:e7:16", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2313903-6e", "ovs_interfaceid": "c2313903-6e4e-42f8-be0f-3c00be1c0fec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1ede15ef-deb2-4892-b7bc-b98c45fd7fcb", "address": "fa:16:3e:22:7d:a3", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ede15ef-de", "ovs_interfaceid": "1ede15ef-deb2-4892-b7bc-b98c45fd7fcb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] 
{{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1845.314381] env[63379]: DEBUG oslo_vmware.api [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52ed9497-ba83-55a8-047b-709626f566a6, 'name': SearchDatastore_Task, 'duration_secs': 0.01115} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1845.314701] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1845.314953] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1845.315222] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1845.315388] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1845.315576] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1845.315871] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b6bbc483-c191-4e72-a8fd-682673c7ef8b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.324643] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1845.324836] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1845.325723] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c4b4d7c-e6b6-4e63-960c-c4ae2b777218 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.334104] env[63379]: DEBUG oslo_vmware.api [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1845.334104] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52709420-c6a4-a536-bf90-92c1f14df053" [ 1845.334104] env[63379]: _type = "Task" [ 1845.334104] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1845.337016] env[63379]: DEBUG oslo_vmware.api [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780153, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06682} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1845.340105] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1845.340865] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1944c10e-2edb-4142-a45b-947317daee79 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.349371] env[63379]: DEBUG oslo_vmware.api [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52709420-c6a4-a536-bf90-92c1f14df053, 'name': SearchDatastore_Task, 'duration_secs': 0.010379} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1845.370845] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] 88dae632-b363-4187-9198-e4300783d420/88dae632-b363-4187-9198-e4300783d420.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1845.371173] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-afca226b-9703-4806-be70-c2ad2b9a25fe {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.373627] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-23f78e3c-757a-4832-a143-45477bdd058b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.391777] env[63379]: DEBUG oslo_vmware.api [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1845.391777] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]527eec03-ff35-e701-df47-9a3d0c8507db" [ 1845.391777] env[63379]: _type = "Task" [ 1845.391777] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1845.395775] env[63379]: DEBUG oslo_vmware.api [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1845.395775] env[63379]: value = "task-1780154" [ 1845.395775] env[63379]: _type = "Task" [ 1845.395775] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1845.401761] env[63379]: DEBUG oslo_vmware.api [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]527eec03-ff35-e701-df47-9a3d0c8507db, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.406670] env[63379]: DEBUG oslo_vmware.api [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780154, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.762687] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c437adcd-13c9-4fcc-a73b-32652d1d3607 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Releasing lock "refresh_cache-8078bac6-146a-4e3a-a7a7-7093f617a330" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1845.763389] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c437adcd-13c9-4fcc-a73b-32652d1d3607 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "8078bac6-146a-4e3a-a7a7-7093f617a330" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1845.763558] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c437adcd-13c9-4fcc-a73b-32652d1d3607 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquired lock "8078bac6-146a-4e3a-a7a7-7093f617a330" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1845.764476] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84934f90-860e-4d4f-afd0-95648a0076e0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.781536] env[63379]: DEBUG nova.virt.hardware [None req-c437adcd-13c9-4fcc-a73b-32652d1d3607 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1845.781832] env[63379]: DEBUG nova.virt.hardware [None req-c437adcd-13c9-4fcc-a73b-32652d1d3607 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1845.782137] env[63379]: DEBUG nova.virt.hardware [None req-c437adcd-13c9-4fcc-a73b-32652d1d3607 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1845.782464] env[63379]: DEBUG nova.virt.hardware [None req-c437adcd-13c9-4fcc-a73b-32652d1d3607 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1845.782734] env[63379]: DEBUG nova.virt.hardware [None req-c437adcd-13c9-4fcc-a73b-32652d1d3607 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1845.782982] env[63379]: DEBUG nova.virt.hardware [None req-c437adcd-13c9-4fcc-a73b-32652d1d3607 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1845.783345] env[63379]: DEBUG nova.virt.hardware [None req-c437adcd-13c9-4fcc-a73b-32652d1d3607 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1845.783628] env[63379]: DEBUG nova.virt.hardware [None req-c437adcd-13c9-4fcc-a73b-32652d1d3607 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1845.783921] env[63379]: DEBUG nova.virt.hardware [None req-c437adcd-13c9-4fcc-a73b-32652d1d3607 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1845.784222] env[63379]: DEBUG nova.virt.hardware [None req-c437adcd-13c9-4fcc-a73b-32652d1d3607 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1845.784520] env[63379]: DEBUG nova.virt.hardware [None req-c437adcd-13c9-4fcc-a73b-32652d1d3607 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1845.791057] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c437adcd-13c9-4fcc-a73b-32652d1d3607 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Reconfiguring VM to attach interface {{(pid=63379) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1845.791368] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f48361c7-82a7-433e-97d2-16fb23424ecf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.811611] env[63379]: DEBUG oslo_vmware.api [None req-c437adcd-13c9-4fcc-a73b-32652d1d3607 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1845.811611] env[63379]: value = "task-1780155" [ 1845.811611] env[63379]: _type = "Task" [ 1845.811611] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1845.820075] env[63379]: DEBUG oslo_vmware.api [None req-c437adcd-13c9-4fcc-a73b-32652d1d3607 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780155, 'name': ReconfigVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.840483] env[63379]: DEBUG nova.compute.manager [req-9a66aa75-6bd4-4611-81bf-f53e8b7bb4a8 req-c9a124d2-bfed-4ea1-b1b2-2dca57f44ee6 service nova] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Received event network-changed-1ede15ef-deb2-4892-b7bc-b98c45fd7fcb {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1845.840691] env[63379]: DEBUG nova.compute.manager [req-9a66aa75-6bd4-4611-81bf-f53e8b7bb4a8 req-c9a124d2-bfed-4ea1-b1b2-2dca57f44ee6 service nova] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Refreshing instance network info cache due to event network-changed-1ede15ef-deb2-4892-b7bc-b98c45fd7fcb. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1845.840912] env[63379]: DEBUG oslo_concurrency.lockutils [req-9a66aa75-6bd4-4611-81bf-f53e8b7bb4a8 req-c9a124d2-bfed-4ea1-b1b2-2dca57f44ee6 service nova] Acquiring lock "refresh_cache-8078bac6-146a-4e3a-a7a7-7093f617a330" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1845.841076] env[63379]: DEBUG oslo_concurrency.lockutils [req-9a66aa75-6bd4-4611-81bf-f53e8b7bb4a8 req-c9a124d2-bfed-4ea1-b1b2-2dca57f44ee6 service nova] Acquired lock "refresh_cache-8078bac6-146a-4e3a-a7a7-7093f617a330" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1845.841250] env[63379]: DEBUG nova.network.neutron [req-9a66aa75-6bd4-4611-81bf-f53e8b7bb4a8 req-c9a124d2-bfed-4ea1-b1b2-2dca57f44ee6 service nova] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Refreshing network info cache for port 1ede15ef-deb2-4892-b7bc-b98c45fd7fcb {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1845.907911] env[63379]: DEBUG oslo_vmware.api [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]527eec03-ff35-e701-df47-9a3d0c8507db, 'name': SearchDatastore_Task, 'duration_secs': 0.010574} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1845.907911] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1845.907911] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 5e7a8635-8345-41c3-b485-a89773f37c5e/5e7a8635-8345-41c3-b485-a89773f37c5e.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1845.907911] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-be804a36-1bca-47bb-adee-e39e2b49d0b9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.914122] env[63379]: DEBUG oslo_vmware.api [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780154, 'name': ReconfigVM_Task, 'duration_secs': 0.30144} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1845.914122] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Reconfigured VM instance instance-00000063 to attach disk [datastore1] 88dae632-b363-4187-9198-e4300783d420/88dae632-b363-4187-9198-e4300783d420.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1845.914122] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a4661b3d-977f-4b68-8104-1d8d240382a4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.917085] env[63379]: DEBUG oslo_vmware.api [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1845.917085] env[63379]: value = "task-1780156" [ 1845.917085] env[63379]: _type = "Task" [ 1845.917085] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1845.926686] env[63379]: DEBUG oslo_vmware.api [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780156, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.928336] env[63379]: DEBUG oslo_vmware.api [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1845.928336] env[63379]: value = "task-1780157" [ 1845.928336] env[63379]: _type = "Task" [ 1845.928336] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1845.940273] env[63379]: DEBUG oslo_vmware.api [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780157, 'name': Rename_Task} progress is 10%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.323728] env[63379]: DEBUG oslo_vmware.api [None req-c437adcd-13c9-4fcc-a73b-32652d1d3607 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780155, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.431032] env[63379]: DEBUG oslo_vmware.api [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780156, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.437622] env[63379]: DEBUG oslo_vmware.api [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780157, 'name': Rename_Task, 'duration_secs': 0.160311} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1846.438304] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1846.438750] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5c8574f4-22b5-4c2b-bc31-e93a74089566 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.445652] env[63379]: DEBUG oslo_vmware.api [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1846.445652] env[63379]: value = "task-1780158" [ 1846.445652] env[63379]: _type = "Task" [ 1846.445652] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1846.454512] env[63379]: DEBUG oslo_vmware.api [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780158, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.696613] env[63379]: DEBUG nova.network.neutron [req-9a66aa75-6bd4-4611-81bf-f53e8b7bb4a8 req-c9a124d2-bfed-4ea1-b1b2-2dca57f44ee6 service nova] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Updated VIF entry in instance network info cache for port 1ede15ef-deb2-4892-b7bc-b98c45fd7fcb. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1846.697094] env[63379]: DEBUG nova.network.neutron [req-9a66aa75-6bd4-4611-81bf-f53e8b7bb4a8 req-c9a124d2-bfed-4ea1-b1b2-2dca57f44ee6 service nova] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Updating instance_info_cache with network_info: [{"id": "c2313903-6e4e-42f8-be0f-3c00be1c0fec", "address": "fa:16:3e:bc:e7:16", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2313903-6e", "ovs_interfaceid": "c2313903-6e4e-42f8-be0f-3c00be1c0fec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1ede15ef-deb2-4892-b7bc-b98c45fd7fcb", "address": "fa:16:3e:22:7d:a3", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ede15ef-de", "ovs_interfaceid": "1ede15ef-deb2-4892-b7bc-b98c45fd7fcb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1846.822466] env[63379]: DEBUG oslo_vmware.api [None req-c437adcd-13c9-4fcc-a73b-32652d1d3607 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780155, 'name': ReconfigVM_Task, 'duration_secs': 0.614465} 
completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1846.822983] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c437adcd-13c9-4fcc-a73b-32652d1d3607 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Releasing lock "8078bac6-146a-4e3a-a7a7-7093f617a330" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1846.823222] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c437adcd-13c9-4fcc-a73b-32652d1d3607 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Reconfigured VM to attach interface {{(pid=63379) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1846.928965] env[63379]: DEBUG oslo_vmware.api [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780156, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.83404} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1846.929797] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 5e7a8635-8345-41c3-b485-a89773f37c5e/5e7a8635-8345-41c3-b485-a89773f37c5e.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1846.929797] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1846.930061] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-48a1e7f5-ed73-4739-b1f2-c0ec9692d006 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.937668] env[63379]: DEBUG oslo_vmware.api [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1846.937668] env[63379]: value = "task-1780159" [ 1846.937668] env[63379]: _type = "Task" [ 1846.937668] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1846.946795] env[63379]: DEBUG oslo_vmware.api [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780159, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.955434] env[63379]: DEBUG oslo_vmware.api [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780158, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.200631] env[63379]: DEBUG oslo_concurrency.lockutils [req-9a66aa75-6bd4-4611-81bf-f53e8b7bb4a8 req-c9a124d2-bfed-4ea1-b1b2-2dca57f44ee6 service nova] Releasing lock "refresh_cache-8078bac6-146a-4e3a-a7a7-7093f617a330" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1847.328507] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c437adcd-13c9-4fcc-a73b-32652d1d3607 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "interface-8078bac6-146a-4e3a-a7a7-7093f617a330-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.310s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1847.352895] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Acquiring lock "d3c05ba6-b565-4432-b815-14ae0933853e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1847.353208] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lock "d3c05ba6-b565-4432-b815-14ae0933853e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1847.449136] env[63379]: DEBUG oslo_vmware.api [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780159, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074899} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1847.452594] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1847.453441] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb25d6bd-25ff-4aa3-92ae-0d9e7b4d1eb1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.460691] env[63379]: DEBUG oslo_vmware.api [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780158, 'name': PowerOnVM_Task, 'duration_secs': 0.53049} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1847.469828] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1847.470079] env[63379]: INFO nova.compute.manager [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Took 14.45 seconds to spawn the instance on the hypervisor. 
[ 1847.470278] env[63379]: DEBUG nova.compute.manager [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1847.479206] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] 5e7a8635-8345-41c3-b485-a89773f37c5e/5e7a8635-8345-41c3-b485-a89773f37c5e.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1847.479970] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-740a143e-df01-4c7d-b840-b214fe95bf64 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.482766] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f183133d-8787-48ea-8a80-c408992ae90b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.505058] env[63379]: DEBUG oslo_vmware.api [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1847.505058] env[63379]: value = "task-1780160" [ 1847.505058] env[63379]: _type = "Task" [ 1847.505058] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1847.514034] env[63379]: DEBUG oslo_vmware.api [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780160, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.856230] env[63379]: DEBUG nova.compute.manager [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1848.003778] env[63379]: DEBUG oslo_vmware.rw_handles [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a2df53-1819-1451-6779-ed22832264e2/disk-0.vmdk. 
{{(pid=63379) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1848.004856] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c84e5d9-722d-4529-a7ad-9d16b45488ec {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.017650] env[63379]: INFO nova.compute.manager [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Took 19.30 seconds to build instance. [ 1848.019969] env[63379]: DEBUG oslo_vmware.rw_handles [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a2df53-1819-1451-6779-ed22832264e2/disk-0.vmdk is in state: ready. {{(pid=63379) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1848.020154] env[63379]: ERROR oslo_vmware.rw_handles [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a2df53-1819-1451-6779-ed22832264e2/disk-0.vmdk due to incomplete transfer. [ 1848.023388] env[63379]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-76b22afb-deb3-42d4-89ee-071354e476bc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.024963] env[63379]: DEBUG oslo_vmware.api [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780160, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.032253] env[63379]: DEBUG oslo_vmware.rw_handles [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a2df53-1819-1451-6779-ed22832264e2/disk-0.vmdk. 
{{(pid=63379) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1848.032469] env[63379]: DEBUG nova.virt.vmwareapi.images [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Uploaded image b70147f5-2309-47ec-8e13-4ad453606361 to the Glance image server {{(pid=63379) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1848.034925] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Destroying the VM {{(pid=63379) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1848.035471] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-4eb85086-cc39-40b1-af7f-94f2961c02dc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.041595] env[63379]: DEBUG oslo_vmware.api [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 1848.041595] env[63379]: value = "task-1780161" [ 1848.041595] env[63379]: _type = "Task" [ 1848.041595] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1848.050642] env[63379]: DEBUG oslo_vmware.api [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780161, 'name': Destroy_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.380448] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1848.381012] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1848.383559] env[63379]: INFO nova.compute.claims [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1848.522314] env[63379]: DEBUG oslo_vmware.api [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780160, 'name': ReconfigVM_Task, 'duration_secs': 0.573803} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1848.522693] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Reconfigured VM instance instance-00000064 to attach disk [datastore1] 5e7a8635-8345-41c3-b485-a89773f37c5e/5e7a8635-8345-41c3-b485-a89773f37c5e.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1848.523619] env[63379]: DEBUG oslo_concurrency.lockutils [None req-daf78507-32fe-4d6c-9d2d-f2d45c60c9c6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "88dae632-b363-4187-9198-e4300783d420" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.820s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1848.523903] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-75a6b2fa-4764-4316-8e44-13ca61cc7c35 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.530231] env[63379]: DEBUG oslo_vmware.api [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1848.530231] env[63379]: value = "task-1780162" [ 1848.530231] env[63379]: _type = "Task" [ 1848.530231] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1848.538720] env[63379]: DEBUG oslo_vmware.api [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780162, 'name': Rename_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.549365] env[63379]: DEBUG oslo_vmware.api [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780161, 'name': Destroy_Task, 'duration_secs': 0.396553} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1848.549541] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Destroyed the VM [ 1848.549793] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Deleting Snapshot of the VM instance {{(pid=63379) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1848.550046] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-8c935109-5955-4c8c-a318-c40daf09cabd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.555421] env[63379]: DEBUG oslo_vmware.api [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 1848.555421] env[63379]: value = "task-1780163" [ 1848.555421] env[63379]: _type = "Task" [ 1848.555421] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1848.564971] env[63379]: DEBUG oslo_vmware.api [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780163, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1849.043117] env[63379]: DEBUG oslo_vmware.api [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780162, 'name': Rename_Task, 'duration_secs': 0.242342} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1849.043542] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1849.044402] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ad42b1a8-352c-4117-b55f-be81852a1f8b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.050410] env[63379]: DEBUG oslo_vmware.api [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1849.050410] env[63379]: value = "task-1780164" [ 1849.050410] env[63379]: _type = "Task" [ 1849.050410] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1849.058525] env[63379]: DEBUG oslo_vmware.api [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780164, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1849.065852] env[63379]: DEBUG oslo_vmware.api [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780163, 'name': RemoveSnapshot_Task} progress is 36%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1849.093582] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ca15651f-338e-44a6-b07a-08f132f23f6c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "interface-8078bac6-146a-4e3a-a7a7-7093f617a330-9563f28a-e929-45dc-ab80-2300f0de4e08" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1849.093917] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ca15651f-338e-44a6-b07a-08f132f23f6c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "interface-8078bac6-146a-4e3a-a7a7-7093f617a330-9563f28a-e929-45dc-ab80-2300f0de4e08" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1849.094312] env[63379]: DEBUG nova.objects.instance [None req-ca15651f-338e-44a6-b07a-08f132f23f6c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lazy-loading 'flavor' on Instance uuid 8078bac6-146a-4e3a-a7a7-7093f617a330 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1849.560695] env[63379]: DEBUG oslo_vmware.api [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780164, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1849.565028] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5035c11-6248-4fc2-b2ca-cbaf6ce0b39d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.572279] env[63379]: DEBUG oslo_vmware.api [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780163, 'name': RemoveSnapshot_Task, 'duration_secs': 0.566604} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1849.574280] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Deleted Snapshot of the VM instance {{(pid=63379) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1849.575025] env[63379]: INFO nova.compute.manager [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Took 22.40 seconds to snapshot the instance on the hypervisor. [ 1849.577713] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59b37e95-058e-4da8-af26-eeba7ffa1828 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.612835] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eea252e-b55a-474a-bc81-29d00acb3d2a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.621166] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-575fd4e7-4a8e-483d-8fed-65094a897442 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.636626] env[63379]: DEBUG nova.compute.provider_tree [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1849.723308] env[63379]: DEBUG nova.objects.instance [None req-ca15651f-338e-44a6-b07a-08f132f23f6c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lazy-loading 'pci_requests' on Instance uuid 8078bac6-146a-4e3a-a7a7-7093f617a330 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1849.760826] env[63379]: DEBUG nova.compute.manager [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Stashing vm_state: active {{(pid=63379) _prep_resize /opt/stack/nova/nova/compute/manager.py:5671}} [ 1850.062038] env[63379]: DEBUG oslo_vmware.api [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780164, 'name': PowerOnVM_Task, 'duration_secs': 0.824092} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1850.062038] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1850.062038] env[63379]: INFO nova.compute.manager [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Took 8.53 seconds to spawn the instance on the hypervisor. [ 1850.062038] env[63379]: DEBUG nova.compute.manager [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1850.062754] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a23995f-1442-4a80-b4ad-f2d01b93b4ad {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.140494] env[63379]: DEBUG nova.scheduler.client.report [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1850.146145] env[63379]: DEBUG nova.compute.manager [None req-7a42a6e7-37c0-46ff-9375-d368f626e4bd tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Found 1 images (rotation: 2) {{(pid=63379) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4555}} [ 1850.226427] env[63379]: DEBUG nova.objects.base [None req-ca15651f-338e-44a6-b07a-08f132f23f6c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Object Instance<8078bac6-146a-4e3a-a7a7-7093f617a330> lazy-loaded attributes: flavor,pci_requests {{(pid=63379) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1850.226676] env[63379]: DEBUG nova.network.neutron [None req-ca15651f-338e-44a6-b07a-08f132f23f6c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1850.278566] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63379) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1850.296437] env[63379]: DEBUG nova.policy [None req-ca15651f-338e-44a6-b07a-08f132f23f6c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5cbf26808a73470898829b58491e7c6f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'edb0d4b37a67492f9e0275b341e80cc2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1850.584453] env[63379]: INFO nova.compute.manager [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Took 14.07 seconds to build instance. [ 1850.649057] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.268s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1850.649635] env[63379]: DEBUG nova.compute.manager [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1850.652792] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.374s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1851.086891] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e06524e9-73d9-4a48-b052-6c76b0273be9 tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "5e7a8635-8345-41c3-b485-a89773f37c5e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.584s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1851.156254] env[63379]: DEBUG nova.compute.utils [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1851.160724] env[63379]: INFO nova.compute.claims [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1851.166647] env[63379]: DEBUG nova.compute.manager [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1851.166940] env[63379]: DEBUG nova.network.neutron [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1851.219074] env[63379]: DEBUG nova.policy [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '919743457d6845ddb3f34a321dc963c5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fceda42cf54845eab8068573e0f8eb26', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1851.317658] env[63379]: DEBUG nova.compute.manager [None req-a993e462-bbfa-481e-a68c-794ece797a1e tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1851.318649] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c84e9ba-7fb0-43ec-8eb8-763f075b4b3d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.484026] env[63379]: DEBUG nova.network.neutron [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Successfully created port: 1b29b7f2-a269-473e-a89e-a072a3155131 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1851.668049] env[63379]: DEBUG nova.compute.manager [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Start building block device mappings for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1851.675868] env[63379]: INFO nova.compute.resource_tracker [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Updating resource usage from migration bcef3c59-0512-4ea1-9f60-aa29a0ecea06 [ 1851.831795] env[63379]: INFO nova.compute.manager [None req-a993e462-bbfa-481e-a68c-794ece797a1e tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] instance snapshotting [ 1851.832788] env[63379]: DEBUG nova.objects.instance [None req-a993e462-bbfa-481e-a68c-794ece797a1e tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lazy-loading 'flavor' on Instance uuid 4b419aa8-d4da-45fd-a6da-6f05ee851f2f {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1851.874349] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91643aa6-1925-4750-85c9-a5eefe6e8b27 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.882827] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcad1b82-5d31-4df0-a7cd-23036d2fc10c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.921715] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec3bb26a-d437-47fd-b52f-f03a701ebb7b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.925248] env[63379]: DEBUG nova.compute.manager [req-c564acdb-f770-4646-8bdc-fe17f2c4ad22 req-5f713d73-5088-4d7d-aa44-dd65c6b3cc99 service nova] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Received event network-vif-plugged-9563f28a-e929-45dc-ab80-2300f0de4e08 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1851.925504] env[63379]: DEBUG oslo_concurrency.lockutils [req-c564acdb-f770-4646-8bdc-fe17f2c4ad22 req-5f713d73-5088-4d7d-aa44-dd65c6b3cc99 service nova] Acquiring lock "8078bac6-146a-4e3a-a7a7-7093f617a330-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1851.925722] env[63379]: DEBUG oslo_concurrency.lockutils [req-c564acdb-f770-4646-8bdc-fe17f2c4ad22 req-5f713d73-5088-4d7d-aa44-dd65c6b3cc99 service nova] Lock "8078bac6-146a-4e3a-a7a7-7093f617a330-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1851.925892] env[63379]: DEBUG oslo_concurrency.lockutils [req-c564acdb-f770-4646-8bdc-fe17f2c4ad22 req-5f713d73-5088-4d7d-aa44-dd65c6b3cc99 service nova] Lock "8078bac6-146a-4e3a-a7a7-7093f617a330-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1851.926081] env[63379]: DEBUG nova.compute.manager [req-c564acdb-f770-4646-8bdc-fe17f2c4ad22 
req-5f713d73-5088-4d7d-aa44-dd65c6b3cc99 service nova] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] No waiting events found dispatching network-vif-plugged-9563f28a-e929-45dc-ab80-2300f0de4e08 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1851.926260] env[63379]: WARNING nova.compute.manager [req-c564acdb-f770-4646-8bdc-fe17f2c4ad22 req-5f713d73-5088-4d7d-aa44-dd65c6b3cc99 service nova] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Received unexpected event network-vif-plugged-9563f28a-e929-45dc-ab80-2300f0de4e08 for instance with vm_state active and task_state None. [ 1851.932816] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ee1df16-bacd-489d-80df-3f75f6bf7f40 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.937175] env[63379]: DEBUG nova.network.neutron [None req-ca15651f-338e-44a6-b07a-08f132f23f6c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Successfully updated port: 9563f28a-e929-45dc-ab80-2300f0de4e08 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1851.952335] env[63379]: DEBUG nova.compute.provider_tree [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1852.202449] env[63379]: DEBUG oslo_concurrency.lockutils [None req-10ac55e5-0b5e-4dff-b25a-7a0010f6fecc tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "5e7a8635-8345-41c3-b485-a89773f37c5e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1852.202771] env[63379]: DEBUG oslo_concurrency.lockutils [None req-10ac55e5-0b5e-4dff-b25a-7a0010f6fecc tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "5e7a8635-8345-41c3-b485-a89773f37c5e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1852.202994] env[63379]: DEBUG oslo_concurrency.lockutils [None req-10ac55e5-0b5e-4dff-b25a-7a0010f6fecc tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "5e7a8635-8345-41c3-b485-a89773f37c5e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1852.203268] env[63379]: DEBUG oslo_concurrency.lockutils [None req-10ac55e5-0b5e-4dff-b25a-7a0010f6fecc tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "5e7a8635-8345-41c3-b485-a89773f37c5e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1852.203489] env[63379]: DEBUG 
oslo_concurrency.lockutils [None req-10ac55e5-0b5e-4dff-b25a-7a0010f6fecc tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "5e7a8635-8345-41c3-b485-a89773f37c5e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1852.206613] env[63379]: INFO nova.compute.manager [None req-10ac55e5-0b5e-4dff-b25a-7a0010f6fecc tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Terminating instance [ 1852.207537] env[63379]: DEBUG nova.compute.manager [None req-10ac55e5-0b5e-4dff-b25a-7a0010f6fecc tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1852.207777] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-10ac55e5-0b5e-4dff-b25a-7a0010f6fecc tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1852.208719] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bafd2cd-1fea-47f2-9221-cd3d0d25b431 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.221676] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-10ac55e5-0b5e-4dff-b25a-7a0010f6fecc tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1852.221972] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-17c6340b-ca39-427d-999a-a3f9d93069a0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.229621] env[63379]: DEBUG oslo_vmware.api [None req-10ac55e5-0b5e-4dff-b25a-7a0010f6fecc tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1852.229621] env[63379]: value = "task-1780165" [ 1852.229621] env[63379]: _type = "Task" [ 1852.229621] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1852.237773] env[63379]: DEBUG oslo_vmware.api [None req-10ac55e5-0b5e-4dff-b25a-7a0010f6fecc tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780165, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1852.338616] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba723d5b-cce9-4a15-b4ed-3f6bf3d6ceaf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.357534] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a447841-9a8b-4dee-b998-0ec06f506cd7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.442792] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ca15651f-338e-44a6-b07a-08f132f23f6c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "refresh_cache-8078bac6-146a-4e3a-a7a7-7093f617a330" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1852.443071] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ca15651f-338e-44a6-b07a-08f132f23f6c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquired lock "refresh_cache-8078bac6-146a-4e3a-a7a7-7093f617a330" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1852.443310] env[63379]: DEBUG nova.network.neutron [None req-ca15651f-338e-44a6-b07a-08f132f23f6c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1852.455685] env[63379]: DEBUG nova.scheduler.client.report [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1852.690105] env[63379]: DEBUG nova.compute.manager [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1852.712934] env[63379]: DEBUG nova.virt.hardware [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1852.713215] env[63379]: DEBUG nova.virt.hardware [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1852.713383] env[63379]: DEBUG nova.virt.hardware [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1852.713574] env[63379]: DEBUG nova.virt.hardware [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1852.713724] env[63379]: DEBUG nova.virt.hardware [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1852.713874] env[63379]: DEBUG nova.virt.hardware [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1852.714099] env[63379]: DEBUG nova.virt.hardware [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1852.714269] env[63379]: DEBUG nova.virt.hardware [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1852.714445] env[63379]: DEBUG nova.virt.hardware [None 
req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1852.714615] env[63379]: DEBUG nova.virt.hardware [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1852.714797] env[63379]: DEBUG nova.virt.hardware [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1852.715714] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c59e494-9a36-42ae-9e9a-5827fb6da259 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.723661] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef4d4f36-db63-4458-952c-d85fafe92078 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.745530] env[63379]: DEBUG oslo_vmware.api [None req-10ac55e5-0b5e-4dff-b25a-7a0010f6fecc tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780165, 'name': PowerOffVM_Task, 'duration_secs': 0.483089} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1852.745801] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-10ac55e5-0b5e-4dff-b25a-7a0010f6fecc tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1852.745973] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-10ac55e5-0b5e-4dff-b25a-7a0010f6fecc tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1852.746234] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d6712cd1-ba38-4b4a-af6e-78421b58aba7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.868316] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a993e462-bbfa-481e-a68c-794ece797a1e tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Creating Snapshot of the VM instance {{(pid=63379) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1852.868659] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-98e48e74-e63d-43dc-a21d-625f0ce96bbb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.876117] env[63379]: DEBUG oslo_vmware.api [None req-a993e462-bbfa-481e-a68c-794ece797a1e tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 1852.876117] env[63379]: value = "task-1780167" [ 1852.876117] env[63379]: _type = "Task" [ 1852.876117] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1852.883850] env[63379]: DEBUG oslo_vmware.api [None req-a993e462-bbfa-481e-a68c-794ece797a1e tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780167, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1852.941161] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-10ac55e5-0b5e-4dff-b25a-7a0010f6fecc tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1852.941418] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-10ac55e5-0b5e-4dff-b25a-7a0010f6fecc tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1852.941611] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-10ac55e5-0b5e-4dff-b25a-7a0010f6fecc tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Deleting the datastore file [datastore1] 5e7a8635-8345-41c3-b485-a89773f37c5e {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1852.941886] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c8504b0c-388a-4264-a13f-86adb398a6a1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.948644] env[63379]: DEBUG oslo_vmware.api [None req-10ac55e5-0b5e-4dff-b25a-7a0010f6fecc tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for the task: (returnval){ [ 1852.948644] env[63379]: value = "task-1780168" [ 1852.948644] env[63379]: _type = "Task" [ 1852.948644] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1852.956706] env[63379]: DEBUG oslo_vmware.api [None req-10ac55e5-0b5e-4dff-b25a-7a0010f6fecc tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780168, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1852.960556] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.308s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1852.960764] env[63379]: INFO nova.compute.manager [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Migrating [ 1852.995208] env[63379]: WARNING nova.network.neutron [None req-ca15651f-338e-44a6-b07a-08f132f23f6c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] 501025fb-aee7-4f74-80fd-af4976529317 already exists in list: networks containing: ['501025fb-aee7-4f74-80fd-af4976529317']. 
ignoring it [ 1852.995511] env[63379]: WARNING nova.network.neutron [None req-ca15651f-338e-44a6-b07a-08f132f23f6c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] 501025fb-aee7-4f74-80fd-af4976529317 already exists in list: networks containing: ['501025fb-aee7-4f74-80fd-af4976529317']. ignoring it [ 1853.265705] env[63379]: DEBUG nova.compute.manager [req-b8d723da-8019-448b-b233-5e54c40f43ee req-9fd61353-da00-410f-91c6-e4d814996e69 service nova] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Received event network-vif-plugged-1b29b7f2-a269-473e-a89e-a072a3155131 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1853.266095] env[63379]: DEBUG oslo_concurrency.lockutils [req-b8d723da-8019-448b-b233-5e54c40f43ee req-9fd61353-da00-410f-91c6-e4d814996e69 service nova] Acquiring lock "d3c05ba6-b565-4432-b815-14ae0933853e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1853.266434] env[63379]: DEBUG oslo_concurrency.lockutils [req-b8d723da-8019-448b-b233-5e54c40f43ee req-9fd61353-da00-410f-91c6-e4d814996e69 service nova] Lock "d3c05ba6-b565-4432-b815-14ae0933853e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1853.266714] env[63379]: DEBUG oslo_concurrency.lockutils [req-b8d723da-8019-448b-b233-5e54c40f43ee req-9fd61353-da00-410f-91c6-e4d814996e69 service nova] Lock "d3c05ba6-b565-4432-b815-14ae0933853e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1853.267009] env[63379]: DEBUG nova.compute.manager [req-b8d723da-8019-448b-b233-5e54c40f43ee req-9fd61353-da00-410f-91c6-e4d814996e69 service nova] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] No waiting events found dispatching network-vif-plugged-1b29b7f2-a269-473e-a89e-a072a3155131 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1853.267340] env[63379]: WARNING nova.compute.manager [req-b8d723da-8019-448b-b233-5e54c40f43ee req-9fd61353-da00-410f-91c6-e4d814996e69 service nova] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Received unexpected event network-vif-plugged-1b29b7f2-a269-473e-a89e-a072a3155131 for instance with vm_state building and task_state spawning. [ 1853.387276] env[63379]: DEBUG oslo_vmware.api [None req-a993e462-bbfa-481e-a68c-794ece797a1e tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780167, 'name': CreateSnapshot_Task, 'duration_secs': 0.453381} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1853.387512] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a993e462-bbfa-481e-a68c-794ece797a1e tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Created Snapshot of the VM instance {{(pid=63379) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1853.388314] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b8b9b7a-719d-4ec4-a61e-11a3b66f0f6c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.457736] env[63379]: DEBUG oslo_vmware.api [None req-10ac55e5-0b5e-4dff-b25a-7a0010f6fecc tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Task: {'id': task-1780168, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.147719} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1853.460158] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-10ac55e5-0b5e-4dff-b25a-7a0010f6fecc tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1853.460367] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-10ac55e5-0b5e-4dff-b25a-7a0010f6fecc tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1853.460555] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-10ac55e5-0b5e-4dff-b25a-7a0010f6fecc tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1853.460734] env[63379]: INFO nova.compute.manager [None req-10ac55e5-0b5e-4dff-b25a-7a0010f6fecc tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Took 1.25 seconds to destroy the instance on the hypervisor. [ 1853.460973] env[63379]: DEBUG oslo.service.loopingcall [None req-10ac55e5-0b5e-4dff-b25a-7a0010f6fecc tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1853.461600] env[63379]: DEBUG nova.compute.manager [-] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1853.461600] env[63379]: DEBUG nova.network.neutron [-] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1853.474619] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "refresh_cache-88dae632-b363-4187-9198-e4300783d420" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1853.474773] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquired lock "refresh_cache-88dae632-b363-4187-9198-e4300783d420" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1853.474964] env[63379]: DEBUG nova.network.neutron [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1853.652948] env[63379]: DEBUG nova.network.neutron [None req-ca15651f-338e-44a6-b07a-08f132f23f6c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Updating instance_info_cache with network_info: [{"id": "c2313903-6e4e-42f8-be0f-3c00be1c0fec", "address": "fa:16:3e:bc:e7:16", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2313903-6e", "ovs_interfaceid": "c2313903-6e4e-42f8-be0f-3c00be1c0fec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1ede15ef-deb2-4892-b7bc-b98c45fd7fcb", "address": "fa:16:3e:22:7d:a3", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": 
"gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ede15ef-de", "ovs_interfaceid": "1ede15ef-deb2-4892-b7bc-b98c45fd7fcb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "9563f28a-e929-45dc-ab80-2300f0de4e08", "address": "fa:16:3e:63:7a:4f", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9563f28a-e9", "ovs_interfaceid": "9563f28a-e929-45dc-ab80-2300f0de4e08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1853.691868] env[63379]: DEBUG nova.compute.manager [req-1d8b22a0-6f70-4609-865a-518b53136971 req-04115f78-098c-4513-a72d-500763376423 service nova] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Received event network-vif-deleted-1aac08ab-b9d0-4a05-b102-37187767d4ef {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1853.692103] env[63379]: INFO nova.compute.manager [req-1d8b22a0-6f70-4609-865a-518b53136971 req-04115f78-098c-4513-a72d-500763376423 service nova] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Neutron deleted interface 1aac08ab-b9d0-4a05-b102-37187767d4ef; detaching it from the instance and deleting it from the info cache [ 1853.692300] env[63379]: DEBUG nova.network.neutron [req-1d8b22a0-6f70-4609-865a-518b53136971 req-04115f78-098c-4513-a72d-500763376423 service nova] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1853.835027] env[63379]: DEBUG nova.network.neutron [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Successfully updated port: 1b29b7f2-a269-473e-a89e-a072a3155131 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} 
[ 1853.907806] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a993e462-bbfa-481e-a68c-794ece797a1e tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Creating linked-clone VM from snapshot {{(pid=63379) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1853.908130] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-c0a557ee-883d-4cac-94bd-a10a376383a7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.916749] env[63379]: DEBUG oslo_vmware.api [None req-a993e462-bbfa-481e-a68c-794ece797a1e tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 1853.916749] env[63379]: value = "task-1780169" [ 1853.916749] env[63379]: _type = "Task" [ 1853.916749] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1853.925844] env[63379]: DEBUG oslo_vmware.api [None req-a993e462-bbfa-481e-a68c-794ece797a1e tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780169, 'name': CloneVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1853.939486] env[63379]: DEBUG nova.compute.manager [req-c9fcd59d-00bd-446a-a791-593a4a404eed req-9d011a69-8d70-4830-9c20-2056280189ba service nova] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Received event network-changed-9563f28a-e929-45dc-ab80-2300f0de4e08 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1853.939686] env[63379]: DEBUG nova.compute.manager [req-c9fcd59d-00bd-446a-a791-593a4a404eed req-9d011a69-8d70-4830-9c20-2056280189ba service nova] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Refreshing instance network info cache due to event network-changed-9563f28a-e929-45dc-ab80-2300f0de4e08. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1853.939864] env[63379]: DEBUG oslo_concurrency.lockutils [req-c9fcd59d-00bd-446a-a791-593a4a404eed req-9d011a69-8d70-4830-9c20-2056280189ba service nova] Acquiring lock "refresh_cache-8078bac6-146a-4e3a-a7a7-7093f617a330" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1854.155952] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ca15651f-338e-44a6-b07a-08f132f23f6c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Releasing lock "refresh_cache-8078bac6-146a-4e3a-a7a7-7093f617a330" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1854.156749] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ca15651f-338e-44a6-b07a-08f132f23f6c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "8078bac6-146a-4e3a-a7a7-7093f617a330" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1854.157012] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ca15651f-338e-44a6-b07a-08f132f23f6c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquired lock "8078bac6-146a-4e3a-a7a7-7093f617a330" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1854.157426] env[63379]: DEBUG oslo_concurrency.lockutils [req-c9fcd59d-00bd-446a-a791-593a4a404eed req-9d011a69-8d70-4830-9c20-2056280189ba service nova] Acquired lock "refresh_cache-8078bac6-146a-4e3a-a7a7-7093f617a330" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1854.157661] env[63379]: DEBUG nova.network.neutron [req-c9fcd59d-00bd-446a-a791-593a4a404eed req-9d011a69-8d70-4830-9c20-2056280189ba service nova] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Refreshing network info cache for port 9563f28a-e929-45dc-ab80-2300f0de4e08 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1854.159558] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1392c566-3ccc-47a9-b258-d65bb02f73fe {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.176920] env[63379]: DEBUG nova.network.neutron [-] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1854.178422] env[63379]: DEBUG nova.virt.hardware [None req-ca15651f-338e-44a6-b07a-08f132f23f6c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63379) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1854.178681] env[63379]: DEBUG nova.virt.hardware [None req-ca15651f-338e-44a6-b07a-08f132f23f6c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1854.178853] env[63379]: DEBUG nova.virt.hardware [None req-ca15651f-338e-44a6-b07a-08f132f23f6c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1854.179061] env[63379]: DEBUG nova.virt.hardware [None req-ca15651f-338e-44a6-b07a-08f132f23f6c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1854.179245] env[63379]: DEBUG nova.virt.hardware [None req-ca15651f-338e-44a6-b07a-08f132f23f6c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1854.179379] env[63379]: DEBUG nova.virt.hardware [None req-ca15651f-338e-44a6-b07a-08f132f23f6c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1854.179587] env[63379]: DEBUG nova.virt.hardware [None req-ca15651f-338e-44a6-b07a-08f132f23f6c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1854.179752] env[63379]: DEBUG nova.virt.hardware [None req-ca15651f-338e-44a6-b07a-08f132f23f6c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1854.179946] env[63379]: DEBUG nova.virt.hardware [None req-ca15651f-338e-44a6-b07a-08f132f23f6c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1854.180162] env[63379]: DEBUG nova.virt.hardware [None req-ca15651f-338e-44a6-b07a-08f132f23f6c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1854.180351] env[63379]: DEBUG nova.virt.hardware [None req-ca15651f-338e-44a6-b07a-08f132f23f6c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1854.186839] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None 
req-ca15651f-338e-44a6-b07a-08f132f23f6c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Reconfiguring VM to attach interface {{(pid=63379) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1854.188456] env[63379]: INFO nova.compute.manager [-] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Took 0.73 seconds to deallocate network for instance. [ 1854.188693] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cc213b1b-3229-4b1c-8e76-8370f04e13b9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.208249] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0d33adbf-12b8-4007-baf6-e685d3ac3ee6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.216361] env[63379]: DEBUG oslo_vmware.api [None req-ca15651f-338e-44a6-b07a-08f132f23f6c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1854.216361] env[63379]: value = "task-1780170" [ 1854.216361] env[63379]: _type = "Task" [ 1854.216361] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1854.222933] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e71c167e-9104-44b0-84eb-f54603f12859 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.237058] env[63379]: DEBUG oslo_vmware.api [None req-ca15651f-338e-44a6-b07a-08f132f23f6c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780170, 'name': ReconfigVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.256969] env[63379]: DEBUG nova.compute.manager [req-1d8b22a0-6f70-4609-865a-518b53136971 req-04115f78-098c-4513-a72d-500763376423 service nova] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Detach interface failed, port_id=1aac08ab-b9d0-4a05-b102-37187767d4ef, reason: Instance 5e7a8635-8345-41c3-b485-a89773f37c5e could not be found. 
{{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 1854.258267] env[63379]: DEBUG nova.network.neutron [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Updating instance_info_cache with network_info: [{"id": "083ff06b-fbdc-4b0f-9c47-6fce99aa11ac", "address": "fa:16:3e:b4:7f:07", "network": {"id": "f43cdd88-dc3a-4cc6-af5d-da244f472d78", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-715557899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "928a9d102f0e45b897eae72fa566c0fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23fc30ea-1f06-424d-86e1-27ae5435b1a9", "external-id": "nsx-vlan-transportzone-189", "segmentation_id": 189, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap083ff06b-fb", "ovs_interfaceid": "083ff06b-fbdc-4b0f-9c47-6fce99aa11ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1854.338679] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Acquiring lock "refresh_cache-d3c05ba6-b565-4432-b815-14ae0933853e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1854.338937] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Acquired lock "refresh_cache-d3c05ba6-b565-4432-b815-14ae0933853e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1854.339027] env[63379]: DEBUG nova.network.neutron [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1854.428107] env[63379]: DEBUG oslo_vmware.api [None req-a993e462-bbfa-481e-a68c-794ece797a1e tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780169, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.713043] env[63379]: DEBUG oslo_concurrency.lockutils [None req-10ac55e5-0b5e-4dff-b25a-7a0010f6fecc tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1854.713308] env[63379]: DEBUG oslo_concurrency.lockutils [None req-10ac55e5-0b5e-4dff-b25a-7a0010f6fecc tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1854.713569] env[63379]: DEBUG nova.objects.instance [None req-10ac55e5-0b5e-4dff-b25a-7a0010f6fecc tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lazy-loading 'resources' on Instance uuid 5e7a8635-8345-41c3-b485-a89773f37c5e {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1854.727474] env[63379]: DEBUG oslo_vmware.api [None req-ca15651f-338e-44a6-b07a-08f132f23f6c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780170, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.761201] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Releasing lock "refresh_cache-88dae632-b363-4187-9198-e4300783d420" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1854.872266] env[63379]: DEBUG nova.network.neutron [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1854.882232] env[63379]: DEBUG nova.network.neutron [req-c9fcd59d-00bd-446a-a791-593a4a404eed req-9d011a69-8d70-4830-9c20-2056280189ba service nova] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Updated VIF entry in instance network info cache for port 9563f28a-e929-45dc-ab80-2300f0de4e08. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1854.882804] env[63379]: DEBUG nova.network.neutron [req-c9fcd59d-00bd-446a-a791-593a4a404eed req-9d011a69-8d70-4830-9c20-2056280189ba service nova] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Updating instance_info_cache with network_info: [{"id": "c2313903-6e4e-42f8-be0f-3c00be1c0fec", "address": "fa:16:3e:bc:e7:16", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2313903-6e", "ovs_interfaceid": "c2313903-6e4e-42f8-be0f-3c00be1c0fec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1ede15ef-deb2-4892-b7bc-b98c45fd7fcb", "address": "fa:16:3e:22:7d:a3", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ede15ef-de", "ovs_interfaceid": "1ede15ef-deb2-4892-b7bc-b98c45fd7fcb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "9563f28a-e929-45dc-ab80-2300f0de4e08", "address": "fa:16:3e:63:7a:4f", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9563f28a-e9", "ovs_interfaceid": "9563f28a-e929-45dc-ab80-2300f0de4e08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1854.928230] env[63379]: DEBUG oslo_vmware.api [None req-a993e462-bbfa-481e-a68c-794ece797a1e tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780169, 'name': CloneVM_Task} progress is 95%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.020751] env[63379]: DEBUG nova.network.neutron [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Updating instance_info_cache with network_info: [{"id": "1b29b7f2-a269-473e-a89e-a072a3155131", "address": "fa:16:3e:2d:77:3c", "network": {"id": "a7b09ae6-790d-492f-a067-68a9ea22533a", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-776111847-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fceda42cf54845eab8068573e0f8eb26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b29b7f2-a2", "ovs_interfaceid": "1b29b7f2-a269-473e-a89e-a072a3155131", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1855.228176] env[63379]: DEBUG oslo_vmware.api [None req-ca15651f-338e-44a6-b07a-08f132f23f6c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780170, 'name': ReconfigVM_Task, 'duration_secs': 0.913133} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1855.228493] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ca15651f-338e-44a6-b07a-08f132f23f6c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Releasing lock "8078bac6-146a-4e3a-a7a7-7093f617a330" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1855.228724] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-ca15651f-338e-44a6-b07a-08f132f23f6c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Reconfigured VM to attach interface {{(pid=63379) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1855.376712] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74b2fdc6-107b-4fab-94fc-4b47abb3d619 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.384553] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-314d94f2-5bc9-4a61-8542-92d210ac911f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.388196] env[63379]: DEBUG oslo_concurrency.lockutils [req-c9fcd59d-00bd-446a-a791-593a4a404eed req-9d011a69-8d70-4830-9c20-2056280189ba service nova] Releasing lock "refresh_cache-8078bac6-146a-4e3a-a7a7-7093f617a330" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1855.388430] env[63379]: DEBUG nova.compute.manager [req-c9fcd59d-00bd-446a-a791-593a4a404eed req-9d011a69-8d70-4830-9c20-2056280189ba service nova] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Received event network-changed-1b29b7f2-a269-473e-a89e-a072a3155131 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1855.388627] env[63379]: DEBUG nova.compute.manager [req-c9fcd59d-00bd-446a-a791-593a4a404eed req-9d011a69-8d70-4830-9c20-2056280189ba service nova] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Refreshing instance network info cache due to event network-changed-1b29b7f2-a269-473e-a89e-a072a3155131. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1855.388820] env[63379]: DEBUG oslo_concurrency.lockutils [req-c9fcd59d-00bd-446a-a791-593a4a404eed req-9d011a69-8d70-4830-9c20-2056280189ba service nova] Acquiring lock "refresh_cache-d3c05ba6-b565-4432-b815-14ae0933853e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1855.417509] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e603dbb2-1122-4465-ad24-5f6c5b9ca3c6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.430529] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13af7398-6b11-4b6d-bbb0-4172bf91e818 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.434187] env[63379]: DEBUG oslo_vmware.api [None req-a993e462-bbfa-481e-a68c-794ece797a1e tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780169, 'name': CloneVM_Task, 'duration_secs': 1.320718} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1855.434445] env[63379]: INFO nova.virt.vmwareapi.vmops [None req-a993e462-bbfa-481e-a68c-794ece797a1e tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Created linked-clone VM from snapshot [ 1855.435452] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25264817-03ab-4f6d-ae56-57ad677b7fb2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.445350] env[63379]: DEBUG nova.compute.provider_tree [None req-10ac55e5-0b5e-4dff-b25a-7a0010f6fecc tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1855.451269] env[63379]: DEBUG nova.virt.vmwareapi.images [None req-a993e462-bbfa-481e-a68c-794ece797a1e tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Uploading image ea87fcd7-6057-4f62-aaab-5b96799012ed {{(pid=63379) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1855.476940] env[63379]: DEBUG oslo_vmware.rw_handles [None req-a993e462-bbfa-481e-a68c-794ece797a1e tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1855.476940] env[63379]: value = "vm-369485" [ 1855.476940] env[63379]: _type = "VirtualMachine" [ 1855.476940] env[63379]: }. 
{{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1855.477263] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-d7b0062e-cce3-492f-889d-3831f2b4f5b7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.483808] env[63379]: DEBUG oslo_vmware.rw_handles [None req-a993e462-bbfa-481e-a68c-794ece797a1e tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lease: (returnval){ [ 1855.483808] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]521448aa-8a84-7a6f-a616-8d9750e4a5d4" [ 1855.483808] env[63379]: _type = "HttpNfcLease" [ 1855.483808] env[63379]: } obtained for exporting VM: (result){ [ 1855.483808] env[63379]: value = "vm-369485" [ 1855.483808] env[63379]: _type = "VirtualMachine" [ 1855.483808] env[63379]: }. {{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1855.484089] env[63379]: DEBUG oslo_vmware.api [None req-a993e462-bbfa-481e-a68c-794ece797a1e tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the lease: (returnval){ [ 1855.484089] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]521448aa-8a84-7a6f-a616-8d9750e4a5d4" [ 1855.484089] env[63379]: _type = "HttpNfcLease" [ 1855.484089] env[63379]: } to be ready. {{(pid=63379) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1855.490780] env[63379]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1855.490780] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]521448aa-8a84-7a6f-a616-8d9750e4a5d4" [ 1855.490780] env[63379]: _type = "HttpNfcLease" [ 1855.490780] env[63379]: } is initializing. 
{{(pid=63379) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1855.523730] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Releasing lock "refresh_cache-d3c05ba6-b565-4432-b815-14ae0933853e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1855.524047] env[63379]: DEBUG nova.compute.manager [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Instance network_info: |[{"id": "1b29b7f2-a269-473e-a89e-a072a3155131", "address": "fa:16:3e:2d:77:3c", "network": {"id": "a7b09ae6-790d-492f-a067-68a9ea22533a", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-776111847-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fceda42cf54845eab8068573e0f8eb26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b29b7f2-a2", "ovs_interfaceid": "1b29b7f2-a269-473e-a89e-a072a3155131", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1855.524341] env[63379]: DEBUG oslo_concurrency.lockutils [req-c9fcd59d-00bd-446a-a791-593a4a404eed req-9d011a69-8d70-4830-9c20-2056280189ba service nova] Acquired lock "refresh_cache-d3c05ba6-b565-4432-b815-14ae0933853e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1855.524528] env[63379]: DEBUG nova.network.neutron [req-c9fcd59d-00bd-446a-a791-593a4a404eed req-9d011a69-8d70-4830-9c20-2056280189ba service nova] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Refreshing network info cache for port 1b29b7f2-a269-473e-a89e-a072a3155131 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1855.526195] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2d:77:3c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1f996252-e329-42bd-a897-446dfe2b81cd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1b29b7f2-a269-473e-a89e-a072a3155131', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1855.534050] env[63379]: DEBUG oslo.service.loopingcall [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] 
Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1855.534838] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1855.535086] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-811d81d3-1ae4-4c7e-8c01-e270e93ff155 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.555498] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1855.555498] env[63379]: value = "task-1780172" [ 1855.555498] env[63379]: _type = "Task" [ 1855.555498] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1855.563306] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780172, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.733961] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ca15651f-338e-44a6-b07a-08f132f23f6c tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "interface-8078bac6-146a-4e3a-a7a7-7093f617a330-9563f28a-e929-45dc-ab80-2300f0de4e08" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.640s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1855.787164] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cf32da3b-d2a8-4674-922e-aecdb072d25b tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "a7cce485-7476-4ea1-b127-68d879e164cd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1855.787502] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cf32da3b-d2a8-4674-922e-aecdb072d25b tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "a7cce485-7476-4ea1-b127-68d879e164cd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1855.787721] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cf32da3b-d2a8-4674-922e-aecdb072d25b tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "a7cce485-7476-4ea1-b127-68d879e164cd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1855.787910] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cf32da3b-d2a8-4674-922e-aecdb072d25b tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "a7cce485-7476-4ea1-b127-68d879e164cd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1855.788096] 
env[63379]: DEBUG oslo_concurrency.lockutils [None req-cf32da3b-d2a8-4674-922e-aecdb072d25b tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "a7cce485-7476-4ea1-b127-68d879e164cd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1855.789929] env[63379]: INFO nova.compute.manager [None req-cf32da3b-d2a8-4674-922e-aecdb072d25b tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Terminating instance [ 1855.791579] env[63379]: DEBUG nova.compute.manager [None req-cf32da3b-d2a8-4674-922e-aecdb072d25b tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1855.791783] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cf32da3b-d2a8-4674-922e-aecdb072d25b tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1855.792628] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-090a8162-e047-4249-bec8-ebe18f90a42f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.800580] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf32da3b-d2a8-4674-922e-aecdb072d25b tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1855.800795] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5674c357-4919-4aa4-b067-7b88ca5dcf9c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.807687] env[63379]: DEBUG oslo_vmware.api [None req-cf32da3b-d2a8-4674-922e-aecdb072d25b tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1855.807687] env[63379]: value = "task-1780173" [ 1855.807687] env[63379]: _type = "Task" [ 1855.807687] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1855.815695] env[63379]: DEBUG oslo_vmware.api [None req-cf32da3b-d2a8-4674-922e-aecdb072d25b tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780173, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.949220] env[63379]: DEBUG nova.scheduler.client.report [None req-10ac55e5-0b5e-4dff-b25a-7a0010f6fecc tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1855.995690] env[63379]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1855.995690] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]521448aa-8a84-7a6f-a616-8d9750e4a5d4" [ 1855.995690] env[63379]: _type = "HttpNfcLease" [ 1855.995690] env[63379]: } is ready. {{(pid=63379) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1855.996018] env[63379]: DEBUG oslo_vmware.rw_handles [None req-a993e462-bbfa-481e-a68c-794ece797a1e tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1855.996018] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]521448aa-8a84-7a6f-a616-8d9750e4a5d4" [ 1855.996018] env[63379]: _type = "HttpNfcLease" [ 1855.996018] env[63379]: }. {{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1855.996797] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d37bcbc6-bfef-46e1-9a7f-3ab1aa8a6730 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.004225] env[63379]: DEBUG oslo_vmware.rw_handles [None req-a993e462-bbfa-481e-a68c-794ece797a1e tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529d4f12-fdb0-666e-a790-bed9a498df6c/disk-0.vmdk from lease info. {{(pid=63379) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1856.004409] env[63379]: DEBUG oslo_vmware.rw_handles [None req-a993e462-bbfa-481e-a68c-794ece797a1e tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529d4f12-fdb0-666e-a790-bed9a498df6c/disk-0.vmdk for reading. {{(pid=63379) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1856.072202] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780172, 'name': CreateVM_Task, 'duration_secs': 0.390628} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1856.072364] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1856.072987] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1856.073184] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1856.073511] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1856.073793] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59daa14a-ce6e-483e-ab27-7729dd9e5887 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.078203] env[63379]: DEBUG oslo_vmware.api [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Waiting for the task: (returnval){ [ 1856.078203] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]521b2888-6225-6f6e-75ac-190243529c49" [ 1856.078203] env[63379]: _type = "Task" [ 1856.078203] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1856.085467] env[63379]: DEBUG oslo_vmware.api [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]521b2888-6225-6f6e-75ac-190243529c49, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.096965] env[63379]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-2c430043-8fc5-43e8-9402-34f698089eb1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.274799] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce8365b0-e311-4890-ad0d-ae15bda2964e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.278763] env[63379]: DEBUG nova.network.neutron [req-c9fcd59d-00bd-446a-a791-593a4a404eed req-9d011a69-8d70-4830-9c20-2056280189ba service nova] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Updated VIF entry in instance network info cache for port 1b29b7f2-a269-473e-a89e-a072a3155131. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1856.279133] env[63379]: DEBUG nova.network.neutron [req-c9fcd59d-00bd-446a-a791-593a4a404eed req-9d011a69-8d70-4830-9c20-2056280189ba service nova] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Updating instance_info_cache with network_info: [{"id": "1b29b7f2-a269-473e-a89e-a072a3155131", "address": "fa:16:3e:2d:77:3c", "network": {"id": "a7b09ae6-790d-492f-a067-68a9ea22533a", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-776111847-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fceda42cf54845eab8068573e0f8eb26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b29b7f2-a2", "ovs_interfaceid": "1b29b7f2-a269-473e-a89e-a072a3155131", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1856.296719] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Updating instance '88dae632-b363-4187-9198-e4300783d420' progress to 0 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1856.316823] env[63379]: DEBUG oslo_vmware.api [None req-cf32da3b-d2a8-4674-922e-aecdb072d25b tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780173, 'name': PowerOffVM_Task, 'duration_secs': 0.200239} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1856.317130] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf32da3b-d2a8-4674-922e-aecdb072d25b tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1856.317341] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cf32da3b-d2a8-4674-922e-aecdb072d25b tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1856.317596] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3561f91b-cd2b-46a5-8a03-75f00358c9ad {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.455074] env[63379]: DEBUG oslo_concurrency.lockutils [None req-10ac55e5-0b5e-4dff-b25a-7a0010f6fecc tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.742s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1856.475340] env[63379]: INFO nova.scheduler.client.report [None req-10ac55e5-0b5e-4dff-b25a-7a0010f6fecc tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Deleted allocations for instance 5e7a8635-8345-41c3-b485-a89773f37c5e [ 1856.521115] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cf32da3b-d2a8-4674-922e-aecdb072d25b tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1856.521454] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cf32da3b-d2a8-4674-922e-aecdb072d25b tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1856.521649] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf32da3b-d2a8-4674-922e-aecdb072d25b tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Deleting the datastore file [datastore1] a7cce485-7476-4ea1-b127-68d879e164cd {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1856.522078] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2f28b60d-b19a-47d0-9ec8-e90bd38481de {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.529385] env[63379]: DEBUG oslo_vmware.api [None req-cf32da3b-d2a8-4674-922e-aecdb072d25b tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1856.529385] env[63379]: value = "task-1780175" [ 1856.529385] env[63379]: _type = "Task" [ 1856.529385] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1856.538170] env[63379]: DEBUG oslo_vmware.api [None req-cf32da3b-d2a8-4674-922e-aecdb072d25b tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780175, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.589301] env[63379]: DEBUG oslo_vmware.api [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]521b2888-6225-6f6e-75ac-190243529c49, 'name': SearchDatastore_Task, 'duration_secs': 0.008738} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1856.589886] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1856.590635] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1856.590752] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1856.590898] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1856.591112] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1856.591498] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a37b0498-f913-4e58-948f-2648a9e3608d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.600941] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1856.601225] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1856.602102] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0efc01db-b9f8-46a6-bf7d-8c2185993dc1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.607741] env[63379]: DEBUG oslo_vmware.api [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Waiting for the task: (returnval){ [ 1856.607741] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52760f07-7b0a-267d-9adb-b81d6de947e8" [ 1856.607741] env[63379]: _type = "Task" [ 1856.607741] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1856.615975] env[63379]: DEBUG oslo_vmware.api [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52760f07-7b0a-267d-9adb-b81d6de947e8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.781838] env[63379]: DEBUG oslo_concurrency.lockutils [req-c9fcd59d-00bd-446a-a791-593a4a404eed req-9d011a69-8d70-4830-9c20-2056280189ba service nova] Releasing lock "refresh_cache-d3c05ba6-b565-4432-b815-14ae0933853e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1856.803331] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1856.804125] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7533d30b-2544-4e29-b22b-124b71812d6b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.811496] env[63379]: DEBUG oslo_vmware.api [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1856.811496] env[63379]: value = "task-1780176" [ 1856.811496] env[63379]: _type = "Task" [ 1856.811496] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1856.822140] env[63379]: DEBUG oslo_vmware.api [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780176, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.983949] env[63379]: DEBUG oslo_concurrency.lockutils [None req-10ac55e5-0b5e-4dff-b25a-7a0010f6fecc tempest-ServerDiskConfigTestJSON-1340181381 tempest-ServerDiskConfigTestJSON-1340181381-project-member] Lock "5e7a8635-8345-41c3-b485-a89773f37c5e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.781s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1857.040823] env[63379]: DEBUG oslo_vmware.api [None req-cf32da3b-d2a8-4674-922e-aecdb072d25b tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780175, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.311981} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1857.041173] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf32da3b-d2a8-4674-922e-aecdb072d25b tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1857.041451] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cf32da3b-d2a8-4674-922e-aecdb072d25b tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1857.041712] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cf32da3b-d2a8-4674-922e-aecdb072d25b tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1857.041936] env[63379]: INFO nova.compute.manager [None req-cf32da3b-d2a8-4674-922e-aecdb072d25b tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Took 1.25 seconds to destroy the instance on the hypervisor. [ 1857.042261] env[63379]: DEBUG oslo.service.loopingcall [None req-cf32da3b-d2a8-4674-922e-aecdb072d25b tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1857.042547] env[63379]: DEBUG nova.compute.manager [-] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1857.042661] env[63379]: DEBUG nova.network.neutron [-] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1857.118696] env[63379]: DEBUG oslo_vmware.api [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52760f07-7b0a-267d-9adb-b81d6de947e8, 'name': SearchDatastore_Task, 'duration_secs': 0.015961} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1857.119991] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bfbf9cdc-2260-4dbe-9f9b-2aaa356e26d9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.125461] env[63379]: DEBUG oslo_vmware.api [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Waiting for the task: (returnval){ [ 1857.125461] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e1b0d4-e52f-6c48-cdcd-b3d568e288ab" [ 1857.125461] env[63379]: _type = "Task" [ 1857.125461] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1857.134985] env[63379]: DEBUG oslo_vmware.api [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e1b0d4-e52f-6c48-cdcd-b3d568e288ab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1857.322500] env[63379]: DEBUG oslo_vmware.api [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780176, 'name': PowerOffVM_Task, 'duration_secs': 0.222163} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1857.323174] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1857.323174] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Updating instance '88dae632-b363-4187-9198-e4300783d420' progress to 17 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1857.454232] env[63379]: DEBUG oslo_concurrency.lockutils [None req-db11a866-f02e-4861-bbcc-d9a3bf0038d4 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "interface-8078bac6-146a-4e3a-a7a7-7093f617a330-1ede15ef-deb2-4892-b7bc-b98c45fd7fcb" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1857.455610] env[63379]: DEBUG oslo_concurrency.lockutils [None req-db11a866-f02e-4861-bbcc-d9a3bf0038d4 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "interface-8078bac6-146a-4e3a-a7a7-7093f617a330-1ede15ef-deb2-4892-b7bc-b98c45fd7fcb" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1857.503966] env[63379]: DEBUG nova.compute.manager [req-7913304e-9124-4e6c-90db-4329e357e04b req-83509c76-484f-4db7-b63e-d250b3ed65c0 service nova] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Received event network-vif-deleted-bf9adade-286a-4e50-a0a5-a80cd17209c6 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1857.504124] env[63379]: INFO nova.compute.manager [req-7913304e-9124-4e6c-90db-4329e357e04b req-83509c76-484f-4db7-b63e-d250b3ed65c0 service nova] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Neutron deleted interface bf9adade-286a-4e50-a0a5-a80cd17209c6; detaching it from the instance and deleting it from the info cache [ 1857.504817] env[63379]: DEBUG nova.network.neutron [req-7913304e-9124-4e6c-90db-4329e357e04b req-83509c76-484f-4db7-b63e-d250b3ed65c0 service nova] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1857.639136] env[63379]: DEBUG oslo_vmware.api [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e1b0d4-e52f-6c48-cdcd-b3d568e288ab, 'name': SearchDatastore_Task, 'duration_secs': 0.009981} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1857.640055] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1857.640055] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] d3c05ba6-b565-4432-b815-14ae0933853e/d3c05ba6-b565-4432-b815-14ae0933853e.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1857.641273] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-57b6ca85-4983-4e15-8ee9-046874613a47 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.651187] env[63379]: DEBUG oslo_vmware.api [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Waiting for the task: (returnval){ [ 1857.651187] env[63379]: value = "task-1780177" [ 1857.651187] env[63379]: _type = "Task" [ 1857.651187] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1857.660748] env[63379]: DEBUG oslo_vmware.api [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1780177, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1857.829719] env[63379]: DEBUG nova.virt.hardware [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1857.830409] env[63379]: DEBUG nova.virt.hardware [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1857.830409] env[63379]: DEBUG nova.virt.hardware [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1857.830570] env[63379]: DEBUG nova.virt.hardware [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1857.830717] env[63379]: DEBUG nova.virt.hardware [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1857.830875] env[63379]: DEBUG nova.virt.hardware [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1857.831107] env[63379]: DEBUG nova.virt.hardware [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1857.831364] env[63379]: DEBUG nova.virt.hardware [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1857.831662] env[63379]: DEBUG nova.virt.hardware [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Got 1 possible 
topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1857.832021] env[63379]: DEBUG nova.virt.hardware [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1857.832182] env[63379]: DEBUG nova.virt.hardware [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1857.838922] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6c7498f7-ef4f-4e0b-bb8b-e73657d911d8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.857362] env[63379]: DEBUG oslo_vmware.api [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1857.857362] env[63379]: value = "task-1780178" [ 1857.857362] env[63379]: _type = "Task" [ 1857.857362] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1857.866646] env[63379]: DEBUG oslo_vmware.api [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780178, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1857.958879] env[63379]: DEBUG oslo_concurrency.lockutils [None req-db11a866-f02e-4861-bbcc-d9a3bf0038d4 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "8078bac6-146a-4e3a-a7a7-7093f617a330" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1857.958879] env[63379]: DEBUG oslo_concurrency.lockutils [None req-db11a866-f02e-4861-bbcc-d9a3bf0038d4 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquired lock "8078bac6-146a-4e3a-a7a7-7093f617a330" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1857.959734] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31987f5e-e402-4386-8551-fbb6253a2595 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.981300] env[63379]: DEBUG nova.network.neutron [-] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1857.984053] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56c1c5cb-984d-45b7-bce7-6ef96aebab30 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.015177] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-db11a866-f02e-4861-bbcc-d9a3bf0038d4 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Reconfiguring VM to detach interface {{(pid=63379) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1858.016156] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bf9b2d3b-8369-4711-8a18-558ce1866de5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.018490] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e0dd6eb1-1a2b-4e7a-b54d-46e023f521e4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.041931] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d2f4955-86a0-4aaf-b840-d37303bf8f17 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.056207] env[63379]: DEBUG oslo_vmware.api [None req-db11a866-f02e-4861-bbcc-d9a3bf0038d4 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1858.056207] env[63379]: value = "task-1780179" [ 1858.056207] env[63379]: _type = "Task" [ 1858.056207] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1858.071075] env[63379]: DEBUG oslo_vmware.api [None req-db11a866-f02e-4861-bbcc-d9a3bf0038d4 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780179, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1858.080910] env[63379]: DEBUG nova.compute.manager [req-7913304e-9124-4e6c-90db-4329e357e04b req-83509c76-484f-4db7-b63e-d250b3ed65c0 service nova] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Detach interface failed, port_id=bf9adade-286a-4e50-a0a5-a80cd17209c6, reason: Instance a7cce485-7476-4ea1-b127-68d879e164cd could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 1858.162758] env[63379]: DEBUG oslo_vmware.api [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1780177, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1858.369966] env[63379]: DEBUG oslo_vmware.api [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780178, 'name': ReconfigVM_Task, 'duration_secs': 0.204417} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1858.370330] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Updating instance '88dae632-b363-4187-9198-e4300783d420' progress to 33 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1858.487792] env[63379]: INFO nova.compute.manager [-] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Took 1.44 seconds to deallocate network for instance. [ 1858.568701] env[63379]: DEBUG oslo_vmware.api [None req-db11a866-f02e-4861-bbcc-d9a3bf0038d4 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780179, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1858.663610] env[63379]: DEBUG oslo_vmware.api [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1780177, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.973626} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1858.663951] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] d3c05ba6-b565-4432-b815-14ae0933853e/d3c05ba6-b565-4432-b815-14ae0933853e.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1858.664204] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1858.664546] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6b098620-2d51-49a2-a319-5845346201ce {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.672341] env[63379]: DEBUG oslo_vmware.api [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Waiting for the task: (returnval){ [ 1858.672341] env[63379]: value = "task-1780180" [ 1858.672341] env[63379]: _type = "Task" [ 1858.672341] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1858.681057] env[63379]: DEBUG oslo_vmware.api [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1780180, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1858.876916] env[63379]: DEBUG nova.virt.hardware [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1858.877191] env[63379]: DEBUG nova.virt.hardware [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1858.877413] env[63379]: DEBUG nova.virt.hardware [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1858.877617] env[63379]: DEBUG nova.virt.hardware [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1858.877770] env[63379]: DEBUG nova.virt.hardware [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1858.877931] env[63379]: DEBUG nova.virt.hardware [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1858.878251] env[63379]: DEBUG nova.virt.hardware [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1858.878448] env[63379]: DEBUG nova.virt.hardware [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1858.878634] env[63379]: DEBUG nova.virt.hardware [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Got 1 possible 
topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1858.878805] env[63379]: DEBUG nova.virt.hardware [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1858.878981] env[63379]: DEBUG nova.virt.hardware [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1858.884216] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Reconfiguring VM instance instance-00000063 to detach disk 2000 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1858.884535] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-59a07f74-ff03-4374-ae58-9a10ca2ba02c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.905178] env[63379]: DEBUG oslo_vmware.api [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1858.905178] env[63379]: value = "task-1780181" [ 1858.905178] env[63379]: _type = "Task" [ 1858.905178] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1858.913531] env[63379]: DEBUG oslo_vmware.api [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780181, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1858.994338] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cf32da3b-d2a8-4674-922e-aecdb072d25b tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1858.994635] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cf32da3b-d2a8-4674-922e-aecdb072d25b tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1858.994892] env[63379]: DEBUG nova.objects.instance [None req-cf32da3b-d2a8-4674-922e-aecdb072d25b tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lazy-loading 'resources' on Instance uuid a7cce485-7476-4ea1-b127-68d879e164cd {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1859.067743] env[63379]: DEBUG oslo_vmware.api [None req-db11a866-f02e-4861-bbcc-d9a3bf0038d4 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780179, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1859.182607] env[63379]: DEBUG oslo_vmware.api [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1780180, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.131321} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1859.182880] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1859.183673] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c50ecd7-312d-4d3d-bcad-811cbc6ba2c3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.205406] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] d3c05ba6-b565-4432-b815-14ae0933853e/d3c05ba6-b565-4432-b815-14ae0933853e.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1859.205716] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e8f60d95-0025-452e-94fb-57fd85fc0abf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.225748] env[63379]: DEBUG oslo_vmware.api [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Waiting for the task: (returnval){ [ 1859.225748] env[63379]: value = "task-1780182" [ 1859.225748] env[63379]: _type = "Task" [ 1859.225748] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1859.234137] env[63379]: DEBUG oslo_vmware.api [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1780182, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1859.416296] env[63379]: DEBUG oslo_vmware.api [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780181, 'name': ReconfigVM_Task, 'duration_secs': 0.175065} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1859.416598] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Reconfigured VM instance instance-00000063 to detach disk 2000 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1859.417528] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d16ac223-dbbb-4501-8cae-86475e157ab0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.441407] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] 88dae632-b363-4187-9198-e4300783d420/88dae632-b363-4187-9198-e4300783d420.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1859.441776] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-518086d5-9c94-4af6-9dd9-e59ba6c758c5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.459806] env[63379]: DEBUG oslo_vmware.api [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1859.459806] env[63379]: value = "task-1780183" [ 1859.459806] env[63379]: _type = "Task" [ 1859.459806] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1859.469811] env[63379]: DEBUG oslo_vmware.api [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780183, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1859.568548] env[63379]: DEBUG oslo_vmware.api [None req-db11a866-f02e-4861-bbcc-d9a3bf0038d4 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780179, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1859.656998] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f15a22a-7ccd-43c1-9fac-ec4288037148 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.665598] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ae8f09f-0ee9-4bee-827b-d5299c2fdde8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.695499] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2652aab-fba1-45bc-b67d-030c23fa9e03 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.703411] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b26995a5-e515-4b0e-8cc2-48960c24635e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.716762] env[63379]: DEBUG nova.compute.provider_tree [None req-cf32da3b-d2a8-4674-922e-aecdb072d25b tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1859.736794] env[63379]: DEBUG oslo_vmware.api [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1780182, 'name': ReconfigVM_Task, 'duration_secs': 0.340586} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1859.737633] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Reconfigured VM instance instance-00000065 to attach disk [datastore1] d3c05ba6-b565-4432-b815-14ae0933853e/d3c05ba6-b565-4432-b815-14ae0933853e.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1859.737795] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9475bac3-f0b7-4510-b037-0cfbd5797d64 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.745319] env[63379]: DEBUG oslo_vmware.api [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Waiting for the task: (returnval){ [ 1859.745319] env[63379]: value = "task-1780184" [ 1859.745319] env[63379]: _type = "Task" [ 1859.745319] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1859.755891] env[63379]: DEBUG oslo_vmware.api [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1780184, 'name': Rename_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1859.969689] env[63379]: DEBUG oslo_vmware.api [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780183, 'name': ReconfigVM_Task, 'duration_secs': 0.309109} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1859.970111] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Reconfigured VM instance instance-00000063 to attach disk [datastore1] 88dae632-b363-4187-9198-e4300783d420/88dae632-b363-4187-9198-e4300783d420.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1859.970882] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Updating instance '88dae632-b363-4187-9198-e4300783d420' progress to 50 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1860.075143] env[63379]: DEBUG oslo_vmware.api [None req-db11a866-f02e-4861-bbcc-d9a3bf0038d4 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780179, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1860.219859] env[63379]: DEBUG nova.scheduler.client.report [None req-cf32da3b-d2a8-4674-922e-aecdb072d25b tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1860.257456] env[63379]: DEBUG oslo_vmware.api [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1780184, 'name': Rename_Task, 'duration_secs': 0.15925} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1860.257785] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1860.258096] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f8b6fe6a-0737-4bc1-a9f5-378f42e060c1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.264906] env[63379]: DEBUG oslo_vmware.api [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Waiting for the task: (returnval){ [ 1860.264906] env[63379]: value = "task-1780185" [ 1860.264906] env[63379]: _type = "Task" [ 1860.264906] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1860.273536] env[63379]: DEBUG oslo_vmware.api [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1780185, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1860.477864] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90678c60-a90a-469b-9cec-ae4838d7ae77 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.497509] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9369be5-6dac-4773-b598-13b7ebd5b05e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.516502] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Updating instance '88dae632-b363-4187-9198-e4300783d420' progress to 67 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1860.570505] env[63379]: DEBUG oslo_vmware.api [None req-db11a866-f02e-4861-bbcc-d9a3bf0038d4 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780179, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1860.727054] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cf32da3b-d2a8-4674-922e-aecdb072d25b tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.731s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1860.752896] env[63379]: INFO nova.scheduler.client.report [None req-cf32da3b-d2a8-4674-922e-aecdb072d25b tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Deleted allocations for instance a7cce485-7476-4ea1-b127-68d879e164cd [ 1860.775784] env[63379]: DEBUG oslo_vmware.api [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1780185, 'name': PowerOnVM_Task, 'duration_secs': 0.503726} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1860.776141] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1860.776349] env[63379]: INFO nova.compute.manager [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Took 8.09 seconds to spawn the instance on the hypervisor. 
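Editor's note: the records above trace the tail of a spawn on the VMware driver: VirtualMachine.PowerOnVM_Task is invoked through oslo.vmware and then polled ("Waiting for the task ... progress is N% ... completed successfully") until the instance is powered on. The following is a minimal sketch of that invoke-and-poll pattern using oslo.vmware's session API; the vCenter host, credentials and vm_ref are placeholders, not values taken from this log, and the exact constructor arguments may differ between oslo.vmware releases.

    # Illustrative sketch only: invoke a vSphere task and poll it the way the
    # "Invoking VirtualMachine.PowerOnVM_Task ... progress is N%" records show.
    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vcenter.example.org',             # placeholder vCenter host
        'administrator@vsphere.local',     # placeholder username
        'secret',                          # placeholder password
        api_retry_count=10,
        task_poll_interval=0.5)            # drives the periodic progress polling

    vm_ref = None  # placeholder: a VirtualMachine managed object reference

    # Start the task, then block while wait_for_task polls TaskInfo and logs
    # progress until the task reaches success (or raises on error).
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)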
[ 1860.776574] env[63379]: DEBUG nova.compute.manager [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1860.777449] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8385b64f-3591-4922-93c2-e3eb2f3b0aa5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.956667] env[63379]: DEBUG oslo_concurrency.lockutils [None req-66770d2e-4524-456c-a7f3-2babd56786be tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "2be6bdea-416e-4912-8930-3c4e4f194f99" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1860.956925] env[63379]: DEBUG oslo_concurrency.lockutils [None req-66770d2e-4524-456c-a7f3-2babd56786be tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "2be6bdea-416e-4912-8930-3c4e4f194f99" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1861.058431] env[63379]: DEBUG nova.network.neutron [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Port 083ff06b-fbdc-4b0f-9c47-6fce99aa11ac binding to destination host cpu-1 is already ACTIVE {{(pid=63379) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1861.071621] env[63379]: DEBUG oslo_vmware.api [None req-db11a866-f02e-4861-bbcc-d9a3bf0038d4 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780179, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1861.261208] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cf32da3b-d2a8-4674-922e-aecdb072d25b tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "a7cce485-7476-4ea1-b127-68d879e164cd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.474s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1861.294845] env[63379]: INFO nova.compute.manager [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Took 12.93 seconds to build instance. 
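Editor's note: the lockutils records in this stretch (the "compute_resources" lock held 1.731s, the instance build lock released after 14.443s, the "refresh_cache-..." lock acquire/release pairs) come from oslo.concurrency's named locks. A minimal sketch of both forms that produce these DEBUG lines follows; the lock names and the bodies of the critical sections are placeholders, not Nova's actual code.

    # Illustrative sketch of the locking pattern behind the
    # 'Acquiring lock ... / acquired ... waited Ns / "released" ... held Ns' records.
    from oslo_concurrency import lockutils

    # Decorator form: emits the acquired/released messages with waited/held timings.
    @lockutils.synchronized('compute_resources')
    def update_usage():
        # placeholder for a resource-tracker style critical section
        pass

    # Context-manager form: emits the lock/release messages seen for the
    # per-instance "refresh_cache-<instance-uuid>" locks (name is a placeholder).
    with lockutils.lock('refresh_cache-<instance-uuid>'):
        # refresh the instance network info cache while holding the named lock
        pass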
[ 1861.393841] env[63379]: DEBUG oslo_concurrency.lockutils [None req-74115a02-89a6-48e9-b230-f9980ed1d25f tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquiring lock "815d0af5-e9a8-4475-9414-42715ea32d6a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1861.394066] env[63379]: DEBUG oslo_concurrency.lockutils [None req-74115a02-89a6-48e9-b230-f9980ed1d25f tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "815d0af5-e9a8-4475-9414-42715ea32d6a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1861.394209] env[63379]: DEBUG oslo_concurrency.lockutils [None req-74115a02-89a6-48e9-b230-f9980ed1d25f tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquiring lock "815d0af5-e9a8-4475-9414-42715ea32d6a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1861.394434] env[63379]: DEBUG oslo_concurrency.lockutils [None req-74115a02-89a6-48e9-b230-f9980ed1d25f tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "815d0af5-e9a8-4475-9414-42715ea32d6a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1861.394642] env[63379]: DEBUG oslo_concurrency.lockutils [None req-74115a02-89a6-48e9-b230-f9980ed1d25f tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "815d0af5-e9a8-4475-9414-42715ea32d6a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1861.397694] env[63379]: INFO nova.compute.manager [None req-74115a02-89a6-48e9-b230-f9980ed1d25f tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Terminating instance [ 1861.399962] env[63379]: DEBUG nova.compute.manager [None req-74115a02-89a6-48e9-b230-f9980ed1d25f tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1861.400212] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-74115a02-89a6-48e9-b230-f9980ed1d25f tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1861.401218] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dd8597c-587e-43b7-9dda-2484f0a814d7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.409747] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-74115a02-89a6-48e9-b230-f9980ed1d25f tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1861.410048] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f137bed7-255d-4959-bb9a-61b0a6cddaec {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.416059] env[63379]: DEBUG oslo_vmware.api [None req-74115a02-89a6-48e9-b230-f9980ed1d25f tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 1861.416059] env[63379]: value = "task-1780186" [ 1861.416059] env[63379]: _type = "Task" [ 1861.416059] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1861.423650] env[63379]: DEBUG oslo_vmware.api [None req-74115a02-89a6-48e9-b230-f9980ed1d25f tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780186, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1861.460905] env[63379]: DEBUG nova.compute.utils [None req-66770d2e-4524-456c-a7f3-2babd56786be tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1861.579246] env[63379]: DEBUG oslo_vmware.api [None req-db11a866-f02e-4861-bbcc-d9a3bf0038d4 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780179, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1861.757753] env[63379]: DEBUG nova.compute.manager [req-ed00633f-d7bb-40b7-a755-e069c37cfed0 req-569fca0c-6526-42b6-83f3-76d2af9f7620 service nova] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Received event network-changed-1b29b7f2-a269-473e-a89e-a072a3155131 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1861.757867] env[63379]: DEBUG nova.compute.manager [req-ed00633f-d7bb-40b7-a755-e069c37cfed0 req-569fca0c-6526-42b6-83f3-76d2af9f7620 service nova] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Refreshing instance network info cache due to event network-changed-1b29b7f2-a269-473e-a89e-a072a3155131. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1861.758243] env[63379]: DEBUG oslo_concurrency.lockutils [req-ed00633f-d7bb-40b7-a755-e069c37cfed0 req-569fca0c-6526-42b6-83f3-76d2af9f7620 service nova] Acquiring lock "refresh_cache-d3c05ba6-b565-4432-b815-14ae0933853e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1861.758450] env[63379]: DEBUG oslo_concurrency.lockutils [req-ed00633f-d7bb-40b7-a755-e069c37cfed0 req-569fca0c-6526-42b6-83f3-76d2af9f7620 service nova] Acquired lock "refresh_cache-d3c05ba6-b565-4432-b815-14ae0933853e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1861.758633] env[63379]: DEBUG nova.network.neutron [req-ed00633f-d7bb-40b7-a755-e069c37cfed0 req-569fca0c-6526-42b6-83f3-76d2af9f7620 service nova] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Refreshing network info cache for port 1b29b7f2-a269-473e-a89e-a072a3155131 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1861.796900] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fadb7a10-7c71-45c4-a556-b7728772a5fb tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lock "d3c05ba6-b565-4432-b815-14ae0933853e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.443s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1861.927206] env[63379]: DEBUG oslo_vmware.api [None req-74115a02-89a6-48e9-b230-f9980ed1d25f tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780186, 'name': PowerOffVM_Task, 'duration_secs': 0.267055} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1861.927554] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-74115a02-89a6-48e9-b230-f9980ed1d25f tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1861.927762] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-74115a02-89a6-48e9-b230-f9980ed1d25f tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1861.928066] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c2d19ea8-b711-4483-b9ac-79039eb860f3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.963977] env[63379]: DEBUG oslo_concurrency.lockutils [None req-66770d2e-4524-456c-a7f3-2babd56786be tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "2be6bdea-416e-4912-8930-3c4e4f194f99" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1862.084581] env[63379]: DEBUG oslo_vmware.api [None req-db11a866-f02e-4861-bbcc-d9a3bf0038d4 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780179, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.092812] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "88dae632-b363-4187-9198-e4300783d420-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1862.093143] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "88dae632-b363-4187-9198-e4300783d420-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1862.093409] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "88dae632-b363-4187-9198-e4300783d420-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1862.488228] env[63379]: DEBUG nova.network.neutron [req-ed00633f-d7bb-40b7-a755-e069c37cfed0 req-569fca0c-6526-42b6-83f3-76d2af9f7620 service nova] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Updated VIF entry in instance network info cache for port 1b29b7f2-a269-473e-a89e-a072a3155131. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1862.488228] env[63379]: DEBUG nova.network.neutron [req-ed00633f-d7bb-40b7-a755-e069c37cfed0 req-569fca0c-6526-42b6-83f3-76d2af9f7620 service nova] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Updating instance_info_cache with network_info: [{"id": "1b29b7f2-a269-473e-a89e-a072a3155131", "address": "fa:16:3e:2d:77:3c", "network": {"id": "a7b09ae6-790d-492f-a067-68a9ea22533a", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-776111847-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.140", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fceda42cf54845eab8068573e0f8eb26", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b29b7f2-a2", "ovs_interfaceid": "1b29b7f2-a269-473e-a89e-a072a3155131", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1862.576602] env[63379]: DEBUG oslo_vmware.api [None req-db11a866-f02e-4861-bbcc-d9a3bf0038d4 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780179, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.989448] env[63379]: DEBUG oslo_concurrency.lockutils [req-ed00633f-d7bb-40b7-a755-e069c37cfed0 req-569fca0c-6526-42b6-83f3-76d2af9f7620 service nova] Releasing lock "refresh_cache-d3c05ba6-b565-4432-b815-14ae0933853e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1863.029720] env[63379]: DEBUG oslo_concurrency.lockutils [None req-66770d2e-4524-456c-a7f3-2babd56786be tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "2be6bdea-416e-4912-8930-3c4e4f194f99" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1863.030097] env[63379]: DEBUG oslo_concurrency.lockutils [None req-66770d2e-4524-456c-a7f3-2babd56786be tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "2be6bdea-416e-4912-8930-3c4e4f194f99" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1863.030405] env[63379]: INFO nova.compute.manager [None req-66770d2e-4524-456c-a7f3-2babd56786be tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Attaching volume 6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2 to /dev/sdb [ 1863.073890] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82988fe6-b277-4b47-9ec7-f2f49f6747c1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.085271] env[63379]: DEBUG oslo_vmware.api [None req-db11a866-f02e-4861-bbcc-d9a3bf0038d4 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780179, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1863.086182] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0151f592-903b-4827-9e13-3e518fa62f66 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.103209] env[63379]: DEBUG nova.virt.block_device [None req-66770d2e-4524-456c-a7f3-2babd56786be tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Updating existing volume attachment record: 579a2482-209d-41eb-bcfb-47ce64f6dc37 {{(pid=63379) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1863.136551] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "refresh_cache-88dae632-b363-4187-9198-e4300783d420" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1863.136651] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquired lock "refresh_cache-88dae632-b363-4187-9198-e4300783d420" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1863.136796] env[63379]: DEBUG nova.network.neutron [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1863.577371] env[63379]: DEBUG oslo_vmware.api [None req-db11a866-f02e-4861-bbcc-d9a3bf0038d4 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780179, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1863.848089] env[63379]: DEBUG nova.network.neutron [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Updating instance_info_cache with network_info: [{"id": "083ff06b-fbdc-4b0f-9c47-6fce99aa11ac", "address": "fa:16:3e:b4:7f:07", "network": {"id": "f43cdd88-dc3a-4cc6-af5d-da244f472d78", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-715557899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "928a9d102f0e45b897eae72fa566c0fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23fc30ea-1f06-424d-86e1-27ae5435b1a9", "external-id": "nsx-vlan-transportzone-189", "segmentation_id": 189, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap083ff06b-fb", "ovs_interfaceid": "083ff06b-fbdc-4b0f-9c47-6fce99aa11ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1863.951101] env[63379]: DEBUG nova.compute.manager [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Stashing vm_state: active {{(pid=63379) _prep_resize /opt/stack/nova/nova/compute/manager.py:5671}} [ 1864.078886] env[63379]: DEBUG oslo_vmware.api [None req-db11a866-f02e-4861-bbcc-d9a3bf0038d4 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780179, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.351103] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Releasing lock "refresh_cache-88dae632-b363-4187-9198-e4300783d420" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1864.471658] env[63379]: DEBUG oslo_concurrency.lockutils [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1864.471953] env[63379]: DEBUG oslo_concurrency.lockutils [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1864.578770] env[63379]: DEBUG oslo_vmware.api [None req-db11a866-f02e-4861-bbcc-d9a3bf0038d4 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780179, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.706204] env[63379]: DEBUG oslo_vmware.rw_handles [None req-a993e462-bbfa-481e-a68c-794ece797a1e tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529d4f12-fdb0-666e-a790-bed9a498df6c/disk-0.vmdk. {{(pid=63379) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1864.707686] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b16609f5-abaa-4440-8ac5-4d7b3801471b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.716254] env[63379]: DEBUG oslo_vmware.rw_handles [None req-a993e462-bbfa-481e-a68c-794ece797a1e tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529d4f12-fdb0-666e-a790-bed9a498df6c/disk-0.vmdk is in state: ready. {{(pid=63379) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1864.716566] env[63379]: ERROR oslo_vmware.rw_handles [None req-a993e462-bbfa-481e-a68c-794ece797a1e tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529d4f12-fdb0-666e-a790-bed9a498df6c/disk-0.vmdk due to incomplete transfer. 
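Editor's note: the ERROR record above shows the image-upload path releasing an HTTP NFC lease: the lease is still in the "ready" state, but the VMDK transfer did not complete, so the handle aborts the lease (HttpNfcLeaseAbort) instead of completing it, then closes the read handle. Below is a self-contained sketch of that complete-vs-abort decision only; every name in it (FakeLease, release_lease, bytes_written, expected_size) is hypothetical and does not mirror oslo.vmware's rw_handles API.

    # Illustrative, runnable sketch of the lease cleanup decision visible above.
    class FakeLease:
        """Stand-in for an HTTP NFC lease; real code talks to vCenter."""
        def __init__(self, state):
            self._state = state
        def state(self):
            return self._state
        def complete(self):
            print('HttpNfcLeaseComplete')
        def abort(self):
            print('HttpNfcLeaseAbort')

    def release_lease(lease, bytes_written, expected_size):
        # A lease still 'ready' with an incomplete transfer is aborted rather
        # than completed, matching the "Aborting lease ... incomplete transfer"
        # ERROR record in this log.
        if lease.state() == 'ready' and bytes_written == expected_size:
            lease.complete()
        else:
            lease.abort()

    release_lease(FakeLease('ready'), bytes_written=10, expected_size=20)  # prints HttpNfcLeaseAbort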
[ 1864.716897] env[63379]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-5471bda2-281f-42ea-b197-3959f696573d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.724988] env[63379]: DEBUG oslo_vmware.rw_handles [None req-a993e462-bbfa-481e-a68c-794ece797a1e tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529d4f12-fdb0-666e-a790-bed9a498df6c/disk-0.vmdk. {{(pid=63379) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1864.725359] env[63379]: DEBUG nova.virt.vmwareapi.images [None req-a993e462-bbfa-481e-a68c-794ece797a1e tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Uploaded image ea87fcd7-6057-4f62-aaab-5b96799012ed to the Glance image server {{(pid=63379) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1864.728573] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-a993e462-bbfa-481e-a68c-794ece797a1e tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Destroying the VM {{(pid=63379) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1864.728908] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-77818f54-6a6b-4c8e-aa8d-2d13033fa9a2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.736819] env[63379]: DEBUG oslo_vmware.api [None req-a993e462-bbfa-481e-a68c-794ece797a1e tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 1864.736819] env[63379]: value = "task-1780191" [ 1864.736819] env[63379]: _type = "Task" [ 1864.736819] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1864.758611] env[63379]: DEBUG oslo_vmware.api [None req-a993e462-bbfa-481e-a68c-794ece797a1e tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780191, 'name': Destroy_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.873071] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a13eb1a-42f7-432b-8544-19daa58039f6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.892134] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0322a482-9fb5-404c-bae9-e37487805d9c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.898997] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Updating instance '88dae632-b363-4187-9198-e4300783d420' progress to 83 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1864.977280] env[63379]: INFO nova.compute.claims [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1865.079492] env[63379]: DEBUG oslo_vmware.api [None req-db11a866-f02e-4861-bbcc-d9a3bf0038d4 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780179, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.246500] env[63379]: DEBUG oslo_vmware.api [None req-a993e462-bbfa-481e-a68c-794ece797a1e tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780191, 'name': Destroy_Task} progress is 33%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.405676] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1865.405994] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5f42aebc-f46b-4fea-8faa-8705d4e3d948 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.412908] env[63379]: DEBUG oslo_vmware.api [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1865.412908] env[63379]: value = "task-1780193" [ 1865.412908] env[63379]: _type = "Task" [ 1865.412908] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1865.421866] env[63379]: DEBUG oslo_vmware.api [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780193, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.486023] env[63379]: INFO nova.compute.resource_tracker [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Updating resource usage from migration 98be47e2-858e-45b1-8975-ef107c38cf34 [ 1865.581828] env[63379]: DEBUG oslo_vmware.api [None req-db11a866-f02e-4861-bbcc-d9a3bf0038d4 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780179, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.625249] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0869e310-a856-4c86-98ef-97e374e57989 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.633538] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb7bd667-a902-4adb-9d01-fa3ca09caa68 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.666827] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f1dc6c6-effa-4d2a-bc01-3d6e6b341d75 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.674846] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b7a0e59-cbb3-4f2e-9480-ea845ed1b1ce {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.688392] env[63379]: DEBUG nova.compute.provider_tree [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1865.748776] env[63379]: DEBUG oslo_vmware.api [None req-a993e462-bbfa-481e-a68c-794ece797a1e tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780191, 'name': Destroy_Task} progress is 33%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.922996] env[63379]: DEBUG oslo_vmware.api [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780193, 'name': PowerOnVM_Task, 'duration_secs': 0.449091} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1865.923252] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1865.923440] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a7ea7d7d-0024-405c-883d-1d6304c4abda tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Updating instance '88dae632-b363-4187-9198-e4300783d420' progress to 100 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1866.082693] env[63379]: DEBUG oslo_vmware.api [None req-db11a866-f02e-4861-bbcc-d9a3bf0038d4 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780179, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.191840] env[63379]: DEBUG nova.scheduler.client.report [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1866.247706] env[63379]: DEBUG oslo_vmware.api [None req-a993e462-bbfa-481e-a68c-794ece797a1e tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780191, 'name': Destroy_Task} progress is 33%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.289518] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-74115a02-89a6-48e9-b230-f9980ed1d25f tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1866.290176] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-74115a02-89a6-48e9-b230-f9980ed1d25f tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1866.290176] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-74115a02-89a6-48e9-b230-f9980ed1d25f tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Deleting the datastore file [datastore1] 815d0af5-e9a8-4475-9414-42715ea32d6a {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1866.290502] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f49fee64-7177-4276-ab8b-1e858ad78ee3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.297761] env[63379]: DEBUG oslo_vmware.api [None req-74115a02-89a6-48e9-b230-f9980ed1d25f tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 1866.297761] env[63379]: value = "task-1780194" [ 1866.297761] env[63379]: _type = "Task" [ 1866.297761] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1866.306337] env[63379]: DEBUG oslo_vmware.api [None req-74115a02-89a6-48e9-b230-f9980ed1d25f tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780194, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.584545] env[63379]: DEBUG oslo_vmware.api [None req-db11a866-f02e-4861-bbcc-d9a3bf0038d4 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780179, 'name': ReconfigVM_Task, 'duration_secs': 8.396859} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1866.584822] env[63379]: DEBUG oslo_concurrency.lockutils [None req-db11a866-f02e-4861-bbcc-d9a3bf0038d4 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Releasing lock "8078bac6-146a-4e3a-a7a7-7093f617a330" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1866.585056] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-db11a866-f02e-4861-bbcc-d9a3bf0038d4 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Reconfigured VM to detach interface {{(pid=63379) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1866.696936] env[63379]: DEBUG oslo_concurrency.lockutils [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.225s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1866.697191] env[63379]: INFO nova.compute.manager [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Migrating [ 1866.749451] env[63379]: DEBUG oslo_vmware.api [None req-a993e462-bbfa-481e-a68c-794ece797a1e tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780191, 'name': Destroy_Task, 'duration_secs': 1.674704} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1866.749609] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-a993e462-bbfa-481e-a68c-794ece797a1e tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Destroyed the VM [ 1866.749840] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a993e462-bbfa-481e-a68c-794ece797a1e tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Deleting Snapshot of the VM instance {{(pid=63379) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1866.750029] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-247062ef-2c13-4e99-9f0b-d6a803b0a4be {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.756426] env[63379]: DEBUG oslo_vmware.api [None req-a993e462-bbfa-481e-a68c-794ece797a1e tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 1866.756426] env[63379]: value = "task-1780195" [ 1866.756426] env[63379]: _type = "Task" [ 1866.756426] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1866.764277] env[63379]: DEBUG oslo_vmware.api [None req-a993e462-bbfa-481e-a68c-794ece797a1e tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780195, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.808151] env[63379]: DEBUG oslo_vmware.api [None req-74115a02-89a6-48e9-b230-f9980ed1d25f tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780194, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.165481} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1866.808419] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-74115a02-89a6-48e9-b230-f9980ed1d25f tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1866.808600] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-74115a02-89a6-48e9-b230-f9980ed1d25f tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1866.808779] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-74115a02-89a6-48e9-b230-f9980ed1d25f tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1866.808948] env[63379]: INFO nova.compute.manager [None req-74115a02-89a6-48e9-b230-f9980ed1d25f tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Took 5.41 seconds to destroy the instance on the hypervisor. [ 1866.809206] env[63379]: DEBUG oslo.service.loopingcall [None req-74115a02-89a6-48e9-b230-f9980ed1d25f tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1866.809393] env[63379]: DEBUG nova.compute.manager [-] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1866.809481] env[63379]: DEBUG nova.network.neutron [-] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1866.812981] env[63379]: DEBUG nova.compute.manager [req-2c666984-3686-4ead-b8cd-4df7ef4dff47 req-3c54d042-4593-4bc6-9d7a-d741b265d003 service nova] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Received event network-vif-deleted-1ede15ef-deb2-4892-b7bc-b98c45fd7fcb {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1866.813356] env[63379]: INFO nova.compute.manager [req-2c666984-3686-4ead-b8cd-4df7ef4dff47 req-3c54d042-4593-4bc6-9d7a-d741b265d003 service nova] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Neutron deleted interface 1ede15ef-deb2-4892-b7bc-b98c45fd7fcb; detaching it from the instance and deleting it from the info cache [ 1866.813579] env[63379]: DEBUG nova.network.neutron [req-2c666984-3686-4ead-b8cd-4df7ef4dff47 req-3c54d042-4593-4bc6-9d7a-d741b265d003 service nova] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Updating instance_info_cache with network_info: [{"id": "c2313903-6e4e-42f8-be0f-3c00be1c0fec", "address": "fa:16:3e:bc:e7:16", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2313903-6e", "ovs_interfaceid": "c2313903-6e4e-42f8-be0f-3c00be1c0fec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "9563f28a-e929-45dc-ab80-2300f0de4e08", "address": "fa:16:3e:63:7a:4f", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9563f28a-e9", "ovs_interfaceid": "9563f28a-e929-45dc-ab80-2300f0de4e08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1867.051397] env[63379]: DEBUG nova.compute.manager [req-965f0d70-e912-45bc-a860-5a4ac70220ac req-45a80a9a-d47f-43bf-b477-7864601a8525 service nova] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Received event network-vif-deleted-9563f28a-e929-45dc-ab80-2300f0de4e08 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1867.051630] env[63379]: INFO nova.compute.manager [req-965f0d70-e912-45bc-a860-5a4ac70220ac req-45a80a9a-d47f-43bf-b477-7864601a8525 service nova] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Neutron deleted interface 9563f28a-e929-45dc-ab80-2300f0de4e08; detaching it from the instance and deleting it from the info cache [ 1867.051884] env[63379]: DEBUG nova.network.neutron [req-965f0d70-e912-45bc-a860-5a4ac70220ac req-45a80a9a-d47f-43bf-b477-7864601a8525 service nova] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Updating instance_info_cache with network_info: [{"id": "c2313903-6e4e-42f8-be0f-3c00be1c0fec", "address": "fa:16:3e:bc:e7:16", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2313903-6e", "ovs_interfaceid": "c2313903-6e4e-42f8-be0f-3c00be1c0fec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1867.211518] env[63379]: DEBUG oslo_concurrency.lockutils [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "refresh_cache-90f0c97d-695b-4975-8ab9-4e77a9175da1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1867.211893] env[63379]: DEBUG oslo_concurrency.lockutils [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquired lock "refresh_cache-90f0c97d-695b-4975-8ab9-4e77a9175da1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1867.211893] 
env[63379]: DEBUG nova.network.neutron [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1867.266593] env[63379]: DEBUG oslo_vmware.api [None req-a993e462-bbfa-481e-a68c-794ece797a1e tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780195, 'name': RemoveSnapshot_Task, 'duration_secs': 0.45768} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1867.266593] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a993e462-bbfa-481e-a68c-794ece797a1e tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Deleted Snapshot of the VM instance {{(pid=63379) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1867.266881] env[63379]: INFO nova.compute.manager [None req-a993e462-bbfa-481e-a68c-794ece797a1e tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Took 14.93 seconds to snapshot the instance on the hypervisor. [ 1867.317032] env[63379]: DEBUG oslo_concurrency.lockutils [req-2c666984-3686-4ead-b8cd-4df7ef4dff47 req-3c54d042-4593-4bc6-9d7a-d741b265d003 service nova] Acquiring lock "8078bac6-146a-4e3a-a7a7-7093f617a330" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1867.317032] env[63379]: DEBUG oslo_concurrency.lockutils [req-2c666984-3686-4ead-b8cd-4df7ef4dff47 req-3c54d042-4593-4bc6-9d7a-d741b265d003 service nova] Acquired lock "8078bac6-146a-4e3a-a7a7-7093f617a330" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1867.317861] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a287633-c79d-49f0-a9ea-56eb477eb3f1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.337115] env[63379]: DEBUG oslo_concurrency.lockutils [req-2c666984-3686-4ead-b8cd-4df7ef4dff47 req-3c54d042-4593-4bc6-9d7a-d741b265d003 service nova] Releasing lock "8078bac6-146a-4e3a-a7a7-7093f617a330" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1867.337404] env[63379]: WARNING nova.compute.manager [req-2c666984-3686-4ead-b8cd-4df7ef4dff47 req-3c54d042-4593-4bc6-9d7a-d741b265d003 service nova] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Detach interface failed, port_id=1ede15ef-deb2-4892-b7bc-b98c45fd7fcb, reason: No device with interface-id 1ede15ef-deb2-4892-b7bc-b98c45fd7fcb exists on VM: nova.exception.NotFound: No device with interface-id 1ede15ef-deb2-4892-b7bc-b98c45fd7fcb exists on VM [ 1867.554476] env[63379]: DEBUG oslo_concurrency.lockutils [req-965f0d70-e912-45bc-a860-5a4ac70220ac req-45a80a9a-d47f-43bf-b477-7864601a8525 service nova] Acquiring lock "8078bac6-146a-4e3a-a7a7-7093f617a330" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1867.554684] env[63379]: DEBUG oslo_concurrency.lockutils [req-965f0d70-e912-45bc-a860-5a4ac70220ac 
req-45a80a9a-d47f-43bf-b477-7864601a8525 service nova] Acquired lock "8078bac6-146a-4e3a-a7a7-7093f617a330" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1867.556108] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eae215fb-b94b-43f4-8c5e-23a0ac65c5d1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.574225] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0a9003a-6bcb-4cb2-91e7-276d9387a965 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.600749] env[63379]: DEBUG nova.virt.vmwareapi.vmops [req-965f0d70-e912-45bc-a860-5a4ac70220ac req-45a80a9a-d47f-43bf-b477-7864601a8525 service nova] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Reconfiguring VM to detach interface {{(pid=63379) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1867.601637] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a38609d8-2357-42d1-ae35-5887a7114c4d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.614985] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e72700e7-bf67-4af3-ab38-1b566ba700d8 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "8078bac6-146a-4e3a-a7a7-7093f617a330" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1867.622551] env[63379]: DEBUG oslo_vmware.api [req-965f0d70-e912-45bc-a860-5a4ac70220ac req-45a80a9a-d47f-43bf-b477-7864601a8525 service nova] Waiting for the task: (returnval){ [ 1867.622551] env[63379]: value = "task-1780196" [ 1867.622551] env[63379]: _type = "Task" [ 1867.622551] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1867.632940] env[63379]: DEBUG oslo_vmware.api [req-965f0d70-e912-45bc-a860-5a4ac70220ac req-45a80a9a-d47f-43bf-b477-7864601a8525 service nova] Task: {'id': task-1780196, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.657355] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-66770d2e-4524-456c-a7f3-2babd56786be tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Volume attach. 
Driver type: vmdk {{(pid=63379) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1867.657676] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-66770d2e-4524-456c-a7f3-2babd56786be tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369488', 'volume_id': '6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2', 'name': 'volume-6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2be6bdea-416e-4912-8930-3c4e4f194f99', 'attached_at': '', 'detached_at': '', 'volume_id': '6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2', 'serial': '6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2'} {{(pid=63379) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1867.658679] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-144167e7-6dff-428c-a749-6e315f78427a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.674720] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a359d77e-3b02-42b5-bdfe-7c9e65e70b35 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.701651] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-66770d2e-4524-456c-a7f3-2babd56786be tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] volume-6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2/volume-6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1867.701964] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1fec6cba-9304-4108-85a6-f94cb1bd3e94 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.722993] env[63379]: DEBUG oslo_vmware.api [None req-66770d2e-4524-456c-a7f3-2babd56786be tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1867.722993] env[63379]: value = "task-1780197" [ 1867.722993] env[63379]: _type = "Task" [ 1867.722993] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1867.737240] env[63379]: DEBUG oslo_vmware.api [None req-66770d2e-4524-456c-a7f3-2babd56786be tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780197, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.793827] env[63379]: DEBUG oslo_concurrency.lockutils [None req-db11a866-f02e-4861-bbcc-d9a3bf0038d4 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "refresh_cache-8078bac6-146a-4e3a-a7a7-7093f617a330" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1867.794053] env[63379]: DEBUG oslo_concurrency.lockutils [None req-db11a866-f02e-4861-bbcc-d9a3bf0038d4 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquired lock "refresh_cache-8078bac6-146a-4e3a-a7a7-7093f617a330" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1867.794251] env[63379]: DEBUG nova.network.neutron [None req-db11a866-f02e-4861-bbcc-d9a3bf0038d4 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1867.797176] env[63379]: DEBUG nova.network.neutron [-] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1867.832949] env[63379]: DEBUG nova.compute.manager [None req-a993e462-bbfa-481e-a68c-794ece797a1e tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Found 2 images (rotation: 2) {{(pid=63379) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4555}} [ 1867.945030] env[63379]: DEBUG nova.network.neutron [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Updating instance_info_cache with network_info: [{"id": "ef820562-0de4-462d-a51d-13e4a4929719", "address": "fa:16:3e:eb:5b:7f", "network": {"id": "c67e6fb1-ba3e-4494-b459-ecd555f3bf64", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1864563188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.212", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c01c5c8c3734c4ea066324e542e7374", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6934071-bf85-4591-9c7d-55c7ea131262", "external-id": "nsx-vlan-transportzone-452", "segmentation_id": 452, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef820562-0d", "ovs_interfaceid": "ef820562-0de4-462d-a51d-13e4a4929719", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1868.132827] env[63379]: DEBUG oslo_vmware.api 
[req-965f0d70-e912-45bc-a860-5a4ac70220ac req-45a80a9a-d47f-43bf-b477-7864601a8525 service nova] Task: {'id': task-1780196, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.232463] env[63379]: DEBUG oslo_vmware.api [None req-66770d2e-4524-456c-a7f3-2babd56786be tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780197, 'name': ReconfigVM_Task, 'duration_secs': 0.341132} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1868.232823] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-66770d2e-4524-456c-a7f3-2babd56786be tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Reconfigured VM instance instance-00000061 to attach disk [datastore1] volume-6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2/volume-6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1868.237426] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-147e8e1f-5884-4650-8d1a-64aae9e7cd78 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.252630] env[63379]: DEBUG oslo_vmware.api [None req-66770d2e-4524-456c-a7f3-2babd56786be tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1868.252630] env[63379]: value = "task-1780198" [ 1868.252630] env[63379]: _type = "Task" [ 1868.252630] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1868.260709] env[63379]: DEBUG oslo_vmware.api [None req-66770d2e-4524-456c-a7f3-2babd56786be tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780198, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.300661] env[63379]: INFO nova.compute.manager [-] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Took 1.49 seconds to deallocate network for instance. 
[ 1868.366342] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "88dae632-b363-4187-9198-e4300783d420" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1868.366606] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "88dae632-b363-4187-9198-e4300783d420" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1868.366805] env[63379]: DEBUG nova.compute.manager [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Going to confirm migration 5 {{(pid=63379) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 1868.445469] env[63379]: DEBUG nova.compute.manager [None req-527fba4a-8b14-43b1-8017-bc5aa0c168f3 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1868.446123] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d060a45-b5bf-43dc-bb18-db47ef5bc789 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.449850] env[63379]: DEBUG oslo_concurrency.lockutils [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Releasing lock "refresh_cache-90f0c97d-695b-4975-8ab9-4e77a9175da1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1868.634616] env[63379]: DEBUG oslo_vmware.api [req-965f0d70-e912-45bc-a860-5a4ac70220ac req-45a80a9a-d47f-43bf-b477-7864601a8525 service nova] Task: {'id': task-1780196, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.635690] env[63379]: DEBUG nova.network.neutron [None req-db11a866-f02e-4861-bbcc-d9a3bf0038d4 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Updating instance_info_cache with network_info: [{"id": "c2313903-6e4e-42f8-be0f-3c00be1c0fec", "address": "fa:16:3e:bc:e7:16", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2313903-6e", "ovs_interfaceid": "c2313903-6e4e-42f8-be0f-3c00be1c0fec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1868.762605] env[63379]: DEBUG oslo_vmware.api [None req-66770d2e-4524-456c-a7f3-2babd56786be tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780198, 'name': ReconfigVM_Task, 'duration_secs': 0.134163} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1868.762922] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-66770d2e-4524-456c-a7f3-2babd56786be tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369488', 'volume_id': '6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2', 'name': 'volume-6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2be6bdea-416e-4912-8930-3c4e4f194f99', 'attached_at': '', 'detached_at': '', 'volume_id': '6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2', 'serial': '6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2'} {{(pid=63379) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1868.809077] env[63379]: DEBUG oslo_concurrency.lockutils [None req-74115a02-89a6-48e9-b230-f9980ed1d25f tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1868.809339] env[63379]: DEBUG oslo_concurrency.lockutils [None req-74115a02-89a6-48e9-b230-f9980ed1d25f tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1868.809558] env[63379]: DEBUG nova.objects.instance [None req-74115a02-89a6-48e9-b230-f9980ed1d25f tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lazy-loading 'resources' on Instance uuid 815d0af5-e9a8-4475-9414-42715ea32d6a {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1868.836870] env[63379]: DEBUG nova.compute.manager [req-e5153063-5497-4d67-af1f-9e49f80781cc req-8abc45de-47e2-4795-bb16-5cf4c332d412 service nova] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Received event network-vif-deleted-e11104ca-6957-4cad-9666-a5c91da87b62 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1868.928110] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "refresh_cache-88dae632-b363-4187-9198-e4300783d420" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1868.928374] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquired lock "refresh_cache-88dae632-b363-4187-9198-e4300783d420" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1868.928567] env[63379]: DEBUG nova.network.neutron [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Building network info cache for instance 
{{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1868.928753] env[63379]: DEBUG nova.objects.instance [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lazy-loading 'info_cache' on Instance uuid 88dae632-b363-4187-9198-e4300783d420 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1868.959725] env[63379]: INFO nova.compute.manager [None req-527fba4a-8b14-43b1-8017-bc5aa0c168f3 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] instance snapshotting [ 1868.960290] env[63379]: DEBUG nova.objects.instance [None req-527fba4a-8b14-43b1-8017-bc5aa0c168f3 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lazy-loading 'flavor' on Instance uuid 4b419aa8-d4da-45fd-a6da-6f05ee851f2f {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1869.133440] env[63379]: DEBUG oslo_vmware.api [req-965f0d70-e912-45bc-a860-5a4ac70220ac req-45a80a9a-d47f-43bf-b477-7864601a8525 service nova] Task: {'id': task-1780196, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.138062] env[63379]: DEBUG oslo_concurrency.lockutils [None req-db11a866-f02e-4861-bbcc-d9a3bf0038d4 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Releasing lock "refresh_cache-8078bac6-146a-4e3a-a7a7-7093f617a330" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1869.433850] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c24ffc87-cfd1-456a-b5e1-1f15b60c783a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.443148] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dff50b16-dda3-41d1-b927-01ced42af115 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.485660] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01687dbb-1bdd-4dea-a895-60dd31080929 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.490190] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b45f351e-59ae-42e2-a7e0-1b1e08f5f3c7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.511020] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6a35cf2-e29c-4bfe-ac5f-b3458906a65c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.515138] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34253226-c862-405a-ad79-8d6345a4000b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.527805] env[63379]: DEBUG nova.compute.provider_tree [None req-74115a02-89a6-48e9-b230-f9980ed1d25f tempest-AttachVolumeNegativeTest-580872197 
tempest-AttachVolumeNegativeTest-580872197-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1869.634434] env[63379]: DEBUG oslo_vmware.api [req-965f0d70-e912-45bc-a860-5a4ac70220ac req-45a80a9a-d47f-43bf-b477-7864601a8525 service nova] Task: {'id': task-1780196, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.641462] env[63379]: DEBUG oslo_concurrency.lockutils [None req-db11a866-f02e-4861-bbcc-d9a3bf0038d4 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "interface-8078bac6-146a-4e3a-a7a7-7093f617a330-1ede15ef-deb2-4892-b7bc-b98c45fd7fcb" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 12.187s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1869.799116] env[63379]: DEBUG nova.objects.instance [None req-66770d2e-4524-456c-a7f3-2babd56786be tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lazy-loading 'flavor' on Instance uuid 2be6bdea-416e-4912-8930-3c4e4f194f99 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1869.994768] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90cedb65-b944-43fe-8425-0d96f8b75a63 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.016199] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Updating instance '90f0c97d-695b-4975-8ab9-4e77a9175da1' progress to 0 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1870.031283] env[63379]: DEBUG nova.scheduler.client.report [None req-74115a02-89a6-48e9-b230-f9980ed1d25f tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1870.037742] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-527fba4a-8b14-43b1-8017-bc5aa0c168f3 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Creating Snapshot of the VM instance {{(pid=63379) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1870.038270] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-7613dc02-841f-4a83-adb5-1932c6243df7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.045905] env[63379]: DEBUG 
oslo_vmware.api [None req-527fba4a-8b14-43b1-8017-bc5aa0c168f3 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 1870.045905] env[63379]: value = "task-1780199" [ 1870.045905] env[63379]: _type = "Task" [ 1870.045905] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1870.055832] env[63379]: DEBUG oslo_vmware.api [None req-527fba4a-8b14-43b1-8017-bc5aa0c168f3 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780199, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.135934] env[63379]: DEBUG oslo_vmware.api [req-965f0d70-e912-45bc-a860-5a4ac70220ac req-45a80a9a-d47f-43bf-b477-7864601a8525 service nova] Task: {'id': task-1780196, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.189283] env[63379]: DEBUG nova.network.neutron [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Updating instance_info_cache with network_info: [{"id": "083ff06b-fbdc-4b0f-9c47-6fce99aa11ac", "address": "fa:16:3e:b4:7f:07", "network": {"id": "f43cdd88-dc3a-4cc6-af5d-da244f472d78", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-715557899-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "928a9d102f0e45b897eae72fa566c0fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23fc30ea-1f06-424d-86e1-27ae5435b1a9", "external-id": "nsx-vlan-transportzone-189", "segmentation_id": 189, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap083ff06b-fb", "ovs_interfaceid": "083ff06b-fbdc-4b0f-9c47-6fce99aa11ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1870.304969] env[63379]: DEBUG oslo_concurrency.lockutils [None req-66770d2e-4524-456c-a7f3-2babd56786be tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "2be6bdea-416e-4912-8930-3c4e4f194f99" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.275s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1870.522485] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1870.522825] env[63379]: DEBUG 
oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-960196ce-5e14-46b2-9b43-591933945ad1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.530135] env[63379]: DEBUG oslo_vmware.api [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1870.530135] env[63379]: value = "task-1780200" [ 1870.530135] env[63379]: _type = "Task" [ 1870.530135] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1870.538147] env[63379]: DEBUG oslo_vmware.api [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780200, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.538806] env[63379]: DEBUG oslo_concurrency.lockutils [None req-74115a02-89a6-48e9-b230-f9980ed1d25f tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.729s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1870.555555] env[63379]: DEBUG oslo_vmware.api [None req-527fba4a-8b14-43b1-8017-bc5aa0c168f3 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780199, 'name': CreateSnapshot_Task, 'duration_secs': 0.47361} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1870.555840] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-527fba4a-8b14-43b1-8017-bc5aa0c168f3 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Created Snapshot of the VM instance {{(pid=63379) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1870.556661] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b0484dd-24aa-4537-9dd0-8963cd0ce0db {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.559899] env[63379]: INFO nova.scheduler.client.report [None req-74115a02-89a6-48e9-b230-f9980ed1d25f tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Deleted allocations for instance 815d0af5-e9a8-4475-9414-42715ea32d6a [ 1870.635806] env[63379]: DEBUG oslo_vmware.api [req-965f0d70-e912-45bc-a860-5a4ac70220ac req-45a80a9a-d47f-43bf-b477-7864601a8525 service nova] Task: {'id': task-1780196, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.644417] env[63379]: INFO nova.compute.manager [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Rebuilding instance [ 1870.684164] env[63379]: DEBUG nova.compute.manager [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1870.685057] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a7ae64b-5fda-4204-8978-f16e177bba3d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.692172] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Releasing lock "refresh_cache-88dae632-b363-4187-9198-e4300783d420" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1870.692412] env[63379]: DEBUG nova.objects.instance [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lazy-loading 'migration_context' on Instance uuid 88dae632-b363-4187-9198-e4300783d420 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1871.043546] env[63379]: DEBUG oslo_vmware.api [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780200, 'name': PowerOffVM_Task, 'duration_secs': 0.225133} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1871.043737] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1871.043967] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Updating instance '90f0c97d-695b-4975-8ab9-4e77a9175da1' progress to 17 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1871.079768] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-527fba4a-8b14-43b1-8017-bc5aa0c168f3 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Creating linked-clone VM from snapshot {{(pid=63379) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1871.080424] env[63379]: DEBUG oslo_concurrency.lockutils [None req-74115a02-89a6-48e9-b230-f9980ed1d25f tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "815d0af5-e9a8-4475-9414-42715ea32d6a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.687s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1871.081422] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-29ed04e9-a263-43ab-be07-96d93ca076f2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.090723] env[63379]: DEBUG oslo_vmware.api [None req-527fba4a-8b14-43b1-8017-bc5aa0c168f3 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 1871.090723] env[63379]: value = "task-1780201" [ 1871.090723] env[63379]: _type = "Task" [ 1871.090723] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.100634] env[63379]: DEBUG oslo_vmware.api [None req-527fba4a-8b14-43b1-8017-bc5aa0c168f3 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780201, 'name': CloneVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.135368] env[63379]: DEBUG oslo_vmware.api [req-965f0d70-e912-45bc-a860-5a4ac70220ac req-45a80a9a-d47f-43bf-b477-7864601a8525 service nova] Task: {'id': task-1780196, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.198430] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1871.199212] env[63379]: DEBUG nova.objects.base [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Object Instance<88dae632-b363-4187-9198-e4300783d420> lazy-loaded attributes: info_cache,migration_context {{(pid=63379) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1871.199542] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1c9dff87-04a8-4362-a92a-7f32d83e737d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.201942] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b64c757b-766c-4794-a601-2880a82bf724 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.222762] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3956b485-5853-4b7f-a862-c5be1f193f67 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.225283] env[63379]: DEBUG oslo_vmware.api [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1871.225283] env[63379]: value = "task-1780202" [ 1871.225283] env[63379]: _type = "Task" [ 1871.225283] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.230304] env[63379]: DEBUG oslo_vmware.api [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1871.230304] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]526d00a2-79d4-5154-8e92-bce669e44050" [ 1871.230304] env[63379]: _type = "Task" [ 1871.230304] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.236430] env[63379]: DEBUG oslo_vmware.api [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780202, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.241898] env[63379]: DEBUG oslo_vmware.api [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]526d00a2-79d4-5154-8e92-bce669e44050, 'name': SearchDatastore_Task, 'duration_secs': 0.008974} completed successfully. 
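The "Acquiring lock ... acquired ... released" records that follow come from oslo.concurrency's lockutils wrapping compute-manager and resource-tracker methods. A tiny illustrative sketch of that usage; the decorator arguments and the method signature below are simplified assumptions, not nova's actual code.

# Illustrative only: roughly how the lockutils "Acquiring lock" /
# "acquired" / "released" records are produced.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def drop_move_claim_at_source(context, instance, migration):
    # Runs with the named lock held; the log reports how long callers
    # waited for the lock and how long it was held.
    pass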
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1871.242232] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1871.242477] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1871.551795] env[63379]: DEBUG nova.virt.hardware [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1871.552193] env[63379]: DEBUG nova.virt.hardware [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1871.552380] env[63379]: DEBUG nova.virt.hardware [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1871.552624] env[63379]: DEBUG nova.virt.hardware [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1871.552791] env[63379]: DEBUG nova.virt.hardware [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1871.552949] env[63379]: DEBUG nova.virt.hardware [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1871.553198] env[63379]: DEBUG nova.virt.hardware [None 
req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1871.553414] env[63379]: DEBUG nova.virt.hardware [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1871.553665] env[63379]: DEBUG nova.virt.hardware [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1871.553887] env[63379]: DEBUG nova.virt.hardware [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1871.554152] env[63379]: DEBUG nova.virt.hardware [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1871.560809] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3f656bc2-2c24-40e3-b858-98ea0b2d3c3a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.577775] env[63379]: DEBUG oslo_vmware.api [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1871.577775] env[63379]: value = "task-1780203" [ 1871.577775] env[63379]: _type = "Task" [ 1871.577775] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.586682] env[63379]: DEBUG oslo_vmware.api [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780203, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.600057] env[63379]: DEBUG oslo_vmware.api [None req-527fba4a-8b14-43b1-8017-bc5aa0c168f3 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780201, 'name': CloneVM_Task} progress is 94%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.637040] env[63379]: DEBUG oslo_vmware.api [req-965f0d70-e912-45bc-a860-5a4ac70220ac req-45a80a9a-d47f-43bf-b477-7864601a8525 service nova] Task: {'id': task-1780196, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.734775] env[63379]: DEBUG oslo_vmware.api [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780202, 'name': PowerOffVM_Task, 'duration_secs': 0.345781} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1871.735066] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1871.784680] env[63379]: INFO nova.compute.manager [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Detaching volume 6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2 [ 1871.814956] env[63379]: INFO nova.virt.block_device [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Attempting to driver detach volume 6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2 from mountpoint /dev/sdb [ 1871.815230] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Volume detach. 
Driver type: vmdk {{(pid=63379) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1871.815531] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369488', 'volume_id': '6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2', 'name': 'volume-6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2be6bdea-416e-4912-8930-3c4e4f194f99', 'attached_at': '', 'detached_at': '', 'volume_id': '6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2', 'serial': '6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2'} {{(pid=63379) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1871.816422] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1017e266-0118-4aa7-8c45-c854f739e183 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.843084] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43d6bafe-5024-411d-9ffa-6b58059ec027 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.850629] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3186456-4093-428c-af5f-f75f4bf408e2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.873313] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71b235c7-3fc3-429d-9645-83045c7a53be {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.888446] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] The volume has not been displaced from its original location: [datastore1] volume-6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2/volume-6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2.vmdk. No consolidation needed. 
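The _detach_volume_vmdk records above, and the "Reconfiguring VM instance ... to detach disk" step recorded just after, boil down to a ReconfigVM_Task whose device change removes the virtual disk while leaving the backing VMDK file in place. A hedged sketch of that spec; "disk_device" (the VirtualDisk previously looked up on the VM) and the helper name are assumptions, not nova's exact volumeops code.

# Rough sketch of the detach-disk reconfigure; "session", "vm_ref" and
# "disk_device" are assumed inputs.
def detach_disk_from_vm(session, vm_ref, disk_device):
    cf = session.vim.client.factory
    device_change = cf.create('ns0:VirtualDeviceConfigSpec')
    device_change.operation = 'remove'   # remove from the VM config only;
    device_change.device = disk_device   # no fileOperation, so the VMDK
                                         # file itself is kept
    spec = cf.create('ns0:VirtualMachineConfigSpec')
    spec.deviceChange = [device_change]
    task = session.invoke_api(session.vim, 'ReconfigVM_Task',
                              vm_ref, spec=spec)
    return session.wait_for_task(task)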
{{(pid=63379) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1871.893916] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Reconfiguring VM instance instance-00000061 to detach disk 2001 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1871.896476] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-95c93585-0785-41ac-acdd-80cd359b8261 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.915359] env[63379]: DEBUG oslo_vmware.api [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1871.915359] env[63379]: value = "task-1780204" [ 1871.915359] env[63379]: _type = "Task" [ 1871.915359] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.925802] env[63379]: DEBUG oslo_vmware.api [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780204, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.943089] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc507aba-2aed-44f9-b5a2-d13cdba5047a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.946267] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2d4cc557-1b41-4130-b732-ad7cb95e3578 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquiring lock "19941838-d6b0-4fb8-9d06-f4a1b80ba428" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1871.946536] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2d4cc557-1b41-4130-b732-ad7cb95e3578 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "19941838-d6b0-4fb8-9d06-f4a1b80ba428" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1871.952396] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92c23188-5d1c-41fa-811f-5bf995b0f493 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.985040] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cf43a5d-0ed6-496a-9235-111035a0858b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.992522] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1c4dbf7-6107-4473-905d-629f590913db {{(pid=63379) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.006190] env[63379]: DEBUG nova.compute.provider_tree [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1872.088996] env[63379]: DEBUG oslo_vmware.api [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780203, 'name': ReconfigVM_Task, 'duration_secs': 0.202197} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1872.089391] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Updating instance '90f0c97d-695b-4975-8ab9-4e77a9175da1' progress to 33 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1872.101565] env[63379]: DEBUG oslo_vmware.api [None req-527fba4a-8b14-43b1-8017-bc5aa0c168f3 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780201, 'name': CloneVM_Task} progress is 100%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.140214] env[63379]: DEBUG oslo_vmware.api [req-965f0d70-e912-45bc-a860-5a4ac70220ac req-45a80a9a-d47f-43bf-b477-7864601a8525 service nova] Task: {'id': task-1780196, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.427044] env[63379]: DEBUG oslo_vmware.api [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780204, 'name': ReconfigVM_Task, 'duration_secs': 0.199808} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1872.427044] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Reconfigured VM instance instance-00000061 to detach disk 2001 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1872.431267] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ef258de7-b71c-4127-be5a-e5911bfa3a85 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.447135] env[63379]: DEBUG oslo_vmware.api [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1872.447135] env[63379]: value = "task-1780205" [ 1872.447135] env[63379]: _type = "Task" [ 1872.447135] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1872.451035] env[63379]: INFO nova.compute.manager [None req-2d4cc557-1b41-4130-b732-ad7cb95e3578 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Detaching volume e897c929-908c-41dd-b7b3-54172d033a9c [ 1872.457361] env[63379]: DEBUG oslo_vmware.api [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780205, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.483459] env[63379]: INFO nova.virt.block_device [None req-2d4cc557-1b41-4130-b732-ad7cb95e3578 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Attempting to driver detach volume e897c929-908c-41dd-b7b3-54172d033a9c from mountpoint /dev/sdb [ 1872.483704] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-2d4cc557-1b41-4130-b732-ad7cb95e3578 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Volume detach. Driver type: vmdk {{(pid=63379) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1872.483927] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-2d4cc557-1b41-4130-b732-ad7cb95e3578 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369475', 'volume_id': 'e897c929-908c-41dd-b7b3-54172d033a9c', 'name': 'volume-e897c929-908c-41dd-b7b3-54172d033a9c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '19941838-d6b0-4fb8-9d06-f4a1b80ba428', 'attached_at': '', 'detached_at': '', 'volume_id': 'e897c929-908c-41dd-b7b3-54172d033a9c', 'serial': 'e897c929-908c-41dd-b7b3-54172d033a9c'} {{(pid=63379) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1872.484806] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71443bf2-8c6f-4beb-9dc5-dfd70b07ae62 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.505802] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c108bd6-011a-4789-a4e8-3a20b1f96ea8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.509046] env[63379]: DEBUG nova.scheduler.client.report [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 
1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1872.516659] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c423639-f54f-46f9-a202-7a96fdf2bc98 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.539553] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5053e63b-f94a-4884-80c8-e9be867c97ef {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.555066] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-2d4cc557-1b41-4130-b732-ad7cb95e3578 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] The volume has not been displaced from its original location: [datastore1] volume-e897c929-908c-41dd-b7b3-54172d033a9c/volume-e897c929-908c-41dd-b7b3-54172d033a9c.vmdk. No consolidation needed. {{(pid=63379) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1872.560397] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-2d4cc557-1b41-4130-b732-ad7cb95e3578 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Reconfiguring VM instance instance-0000004f to detach disk 2001 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1872.561043] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-12d0f768-de18-4c29-a799-9d41aa01dc8a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.579849] env[63379]: DEBUG oslo_vmware.api [None req-2d4cc557-1b41-4130-b732-ad7cb95e3578 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 1872.579849] env[63379]: value = "task-1780206" [ 1872.579849] env[63379]: _type = "Task" [ 1872.579849] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1872.587901] env[63379]: DEBUG oslo_vmware.api [None req-2d4cc557-1b41-4130-b732-ad7cb95e3578 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780206, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.598096] env[63379]: DEBUG nova.virt.hardware [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1872.598337] env[63379]: DEBUG nova.virt.hardware [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1872.598502] env[63379]: DEBUG nova.virt.hardware [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1872.598692] env[63379]: DEBUG nova.virt.hardware [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1872.598846] env[63379]: DEBUG nova.virt.hardware [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1872.599014] env[63379]: DEBUG nova.virt.hardware [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1872.599226] env[63379]: DEBUG nova.virt.hardware [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1872.599399] env[63379]: DEBUG nova.virt.hardware [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1872.599598] env[63379]: DEBUG nova.virt.hardware [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Got 1 possible 
topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1872.599774] env[63379]: DEBUG nova.virt.hardware [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1872.599952] env[63379]: DEBUG nova.virt.hardware [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1872.605095] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Reconfiguring VM instance instance-0000001f to detach disk 2000 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1872.605403] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1946b9d9-b8b6-41e0-836b-ae1d2dd104de {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.623605] env[63379]: DEBUG oslo_vmware.api [None req-527fba4a-8b14-43b1-8017-bc5aa0c168f3 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780201, 'name': CloneVM_Task, 'duration_secs': 1.073874} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1872.624767] env[63379]: INFO nova.virt.vmwareapi.vmops [None req-527fba4a-8b14-43b1-8017-bc5aa0c168f3 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Created linked-clone VM from snapshot [ 1872.625096] env[63379]: DEBUG oslo_vmware.api [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1872.625096] env[63379]: value = "task-1780207" [ 1872.625096] env[63379]: _type = "Task" [ 1872.625096] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1872.625824] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a807e66-1d06-463d-886a-8a808f125b1e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.638688] env[63379]: DEBUG oslo_vmware.api [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780207, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.644455] env[63379]: DEBUG nova.virt.vmwareapi.images [None req-527fba4a-8b14-43b1-8017-bc5aa0c168f3 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Uploading image a8193459-640b-4a3b-9ea5-7376bbbafd60 {{(pid=63379) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1872.646358] env[63379]: DEBUG oslo_vmware.api [req-965f0d70-e912-45bc-a860-5a4ac70220ac req-45a80a9a-d47f-43bf-b477-7864601a8525 service nova] Task: {'id': task-1780196, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.673397] env[63379]: DEBUG oslo_vmware.rw_handles [None req-527fba4a-8b14-43b1-8017-bc5aa0c168f3 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1872.673397] env[63379]: value = "vm-369490" [ 1872.673397] env[63379]: _type = "VirtualMachine" [ 1872.673397] env[63379]: }. {{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1872.673397] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-e9e8fc27-8fdc-4810-a4b7-650f033a65da {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.680306] env[63379]: DEBUG oslo_vmware.rw_handles [None req-527fba4a-8b14-43b1-8017-bc5aa0c168f3 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lease: (returnval){ [ 1872.680306] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c33644-2f21-3800-a24e-ac2ade0ab401" [ 1872.680306] env[63379]: _type = "HttpNfcLease" [ 1872.680306] env[63379]: } obtained for exporting VM: (result){ [ 1872.680306] env[63379]: value = "vm-369490" [ 1872.680306] env[63379]: _type = "VirtualMachine" [ 1872.680306] env[63379]: }. {{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1872.680661] env[63379]: DEBUG oslo_vmware.api [None req-527fba4a-8b14-43b1-8017-bc5aa0c168f3 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the lease: (returnval){ [ 1872.680661] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c33644-2f21-3800-a24e-ac2ade0ab401" [ 1872.680661] env[63379]: _type = "HttpNfcLease" [ 1872.680661] env[63379]: } to be ready. {{(pid=63379) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1872.687927] env[63379]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1872.687927] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c33644-2f21-3800-a24e-ac2ade0ab401" [ 1872.687927] env[63379]: _type = "HttpNfcLease" [ 1872.687927] env[63379]: } is initializing. {{(pid=63379) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1872.957695] env[63379]: DEBUG oslo_vmware.api [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780205, 'name': ReconfigVM_Task, 'duration_secs': 0.270343} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1872.957957] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369488', 'volume_id': '6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2', 'name': 'volume-6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2be6bdea-416e-4912-8930-3c4e4f194f99', 'attached_at': '', 'detached_at': '', 'volume_id': '6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2', 'serial': '6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2'} {{(pid=63379) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1873.090091] env[63379]: DEBUG oslo_vmware.api [None req-2d4cc557-1b41-4130-b732-ad7cb95e3578 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780206, 'name': ReconfigVM_Task, 'duration_secs': 0.282587} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1873.090408] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-2d4cc557-1b41-4130-b732-ad7cb95e3578 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Reconfigured VM instance instance-0000004f to detach disk 2001 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1873.095322] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-10820cb9-7c07-41e8-a77f-6b2ca9e459a3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.110635] env[63379]: DEBUG oslo_vmware.api [None req-2d4cc557-1b41-4130-b732-ad7cb95e3578 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 1873.110635] env[63379]: value = "task-1780209" [ 1873.110635] env[63379]: _type = "Task" [ 1873.110635] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1873.118636] env[63379]: DEBUG oslo_vmware.api [None req-2d4cc557-1b41-4130-b732-ad7cb95e3578 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780209, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.141915] env[63379]: DEBUG oslo_vmware.api [req-965f0d70-e912-45bc-a860-5a4ac70220ac req-45a80a9a-d47f-43bf-b477-7864601a8525 service nova] Task: {'id': task-1780196, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.142180] env[63379]: DEBUG oslo_vmware.api [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780207, 'name': ReconfigVM_Task, 'duration_secs': 0.179892} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1873.142429] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Reconfigured VM instance instance-0000001f to detach disk 2000 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1873.143213] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29757bc1-fa14-4de4-9898-18ef7f2b5b14 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.164764] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Reconfiguring VM instance instance-0000001f to attach disk [datastore1] 90f0c97d-695b-4975-8ab9-4e77a9175da1/90f0c97d-695b-4975-8ab9-4e77a9175da1.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1873.165420] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cfb5a80f-ec21-4a96-9d17-8ebae645bdd7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.185702] env[63379]: DEBUG oslo_vmware.api [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1873.185702] env[63379]: value = "task-1780210" [ 1873.185702] env[63379]: _type = "Task" [ 1873.185702] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1873.187259] env[63379]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1873.187259] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c33644-2f21-3800-a24e-ac2ade0ab401" [ 1873.187259] env[63379]: _type = "HttpNfcLease" [ 1873.187259] env[63379]: } is ready. {{(pid=63379) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1873.190159] env[63379]: DEBUG oslo_vmware.rw_handles [None req-527fba4a-8b14-43b1-8017-bc5aa0c168f3 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1873.190159] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c33644-2f21-3800-a24e-ac2ade0ab401" [ 1873.190159] env[63379]: _type = "HttpNfcLease" [ 1873.190159] env[63379]: }. {{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1873.191056] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f40bd33-167b-4dc3-bf78-81d3032abdcd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.200912] env[63379]: DEBUG oslo_vmware.api [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780210, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.201187] env[63379]: DEBUG oslo_vmware.rw_handles [None req-527fba4a-8b14-43b1-8017-bc5aa0c168f3 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525c2153-4dd6-ed80-2af6-12db1d75cf99/disk-0.vmdk from lease info. {{(pid=63379) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1873.201352] env[63379]: DEBUG oslo_vmware.rw_handles [None req-527fba4a-8b14-43b1-8017-bc5aa0c168f3 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525c2153-4dd6-ed80-2af6-12db1d75cf99/disk-0.vmdk for reading. {{(pid=63379) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1873.286984] env[63379]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-0deadfcd-aed5-4607-9081-50358606780f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.519173] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.276s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1873.620421] env[63379]: DEBUG oslo_vmware.api [None req-2d4cc557-1b41-4130-b732-ad7cb95e3578 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780209, 'name': ReconfigVM_Task, 'duration_secs': 0.162762} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1873.621108] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-2d4cc557-1b41-4130-b732-ad7cb95e3578 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369475', 'volume_id': 'e897c929-908c-41dd-b7b3-54172d033a9c', 'name': 'volume-e897c929-908c-41dd-b7b3-54172d033a9c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '19941838-d6b0-4fb8-9d06-f4a1b80ba428', 'attached_at': '', 'detached_at': '', 'volume_id': 'e897c929-908c-41dd-b7b3-54172d033a9c', 'serial': 'e897c929-908c-41dd-b7b3-54172d033a9c'} {{(pid=63379) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1873.638300] env[63379]: DEBUG oslo_vmware.api [req-965f0d70-e912-45bc-a860-5a4ac70220ac req-45a80a9a-d47f-43bf-b477-7864601a8525 service nova] Task: {'id': task-1780196, 'name': ReconfigVM_Task, 'duration_secs': 5.765174} completed successfully. 
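The HttpNfcLease records above (ExportVm, the lease moving from "initializing" to "ready", then the per-disk VMDK URL being opened for reading) are the standard export handshake used for the stream-optimized image upload. A hedged sketch of that handshake; the property read uses the generic oslo.vmware helper pattern and is not the exact rw_handles internals.

# Hedged sketch of the export-lease handshake; "session" and "vm_ref"
# are assumed, and extracting the per-disk URL is left schematic.
from oslo_vmware import vim_util

def open_export_lease(session, vm_ref):
    # ExportVm returns an HttpNfcLease that starts out "initializing".
    lease = session.invoke_api(session.vim, 'ExportVm', vm_ref)
    # Poll until vCenter marks the lease ready (the "_poll_lease" lines).
    session.wait_for_lease_ready(lease)
    # The lease info carries the HTTPS deviceUrl entries (one per disk)
    # that the image upload then streams from.
    return session.invoke_api(vim_util, 'get_object_property',
                              session.vim, lease, 'info')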
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1873.638672] env[63379]: DEBUG oslo_concurrency.lockutils [req-965f0d70-e912-45bc-a860-5a4ac70220ac req-45a80a9a-d47f-43bf-b477-7864601a8525 service nova] Releasing lock "8078bac6-146a-4e3a-a7a7-7093f617a330" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1873.638954] env[63379]: DEBUG nova.virt.vmwareapi.vmops [req-965f0d70-e912-45bc-a860-5a4ac70220ac req-45a80a9a-d47f-43bf-b477-7864601a8525 service nova] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Reconfigured VM to detach interface {{(pid=63379) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1873.639512] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e72700e7-bf67-4af3-ab38-1b566ba700d8 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "8078bac6-146a-4e3a-a7a7-7093f617a330" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 6.025s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1873.639775] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e72700e7-bf67-4af3-ab38-1b566ba700d8 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "8078bac6-146a-4e3a-a7a7-7093f617a330-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1873.640045] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e72700e7-bf67-4af3-ab38-1b566ba700d8 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "8078bac6-146a-4e3a-a7a7-7093f617a330-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1873.640267] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e72700e7-bf67-4af3-ab38-1b566ba700d8 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "8078bac6-146a-4e3a-a7a7-7093f617a330-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1873.642288] env[63379]: INFO nova.compute.manager [None req-e72700e7-bf67-4af3-ab38-1b566ba700d8 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Terminating instance [ 1873.646769] env[63379]: DEBUG nova.compute.manager [None req-e72700e7-bf67-4af3-ab38-1b566ba700d8 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1873.646980] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e72700e7-bf67-4af3-ab38-1b566ba700d8 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1873.647841] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe560caf-a63b-457c-810f-27c72c3dcd09 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.655182] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e72700e7-bf67-4af3-ab38-1b566ba700d8 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1873.655456] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0b617c08-afe1-40ee-b4cb-3721e4899107 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.662185] env[63379]: DEBUG oslo_vmware.api [None req-e72700e7-bf67-4af3-ab38-1b566ba700d8 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1873.662185] env[63379]: value = "task-1780211" [ 1873.662185] env[63379]: _type = "Task" [ 1873.662185] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1873.670375] env[63379]: DEBUG oslo_vmware.api [None req-e72700e7-bf67-4af3-ab38-1b566ba700d8 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780211, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.697770] env[63379]: DEBUG oslo_vmware.api [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780210, 'name': ReconfigVM_Task, 'duration_secs': 0.32169} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1873.698144] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Reconfigured VM instance instance-0000001f to attach disk [datastore1] 90f0c97d-695b-4975-8ab9-4e77a9175da1/90f0c97d-695b-4975-8ab9-4e77a9175da1.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1873.701841] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Updating instance '90f0c97d-695b-4975-8ab9-4e77a9175da1' progress to 50 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1874.005855] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1874.006618] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4ace5e90-49ae-49fe-82a8-528b28ef0cac {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.013876] env[63379]: DEBUG oslo_vmware.api [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1874.013876] env[63379]: value = "task-1780212" [ 1874.013876] env[63379]: _type = "Task" [ 1874.013876] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1874.021752] env[63379]: DEBUG oslo_vmware.api [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780212, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.084324] env[63379]: INFO nova.scheduler.client.report [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Deleted allocation for migration bcef3c59-0512-4ea1-9f60-aa29a0ecea06 [ 1874.168750] env[63379]: DEBUG nova.objects.instance [None req-2d4cc557-1b41-4130-b732-ad7cb95e3578 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lazy-loading 'flavor' on Instance uuid 19941838-d6b0-4fb8-9d06-f4a1b80ba428 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1874.178987] env[63379]: DEBUG oslo_vmware.api [None req-e72700e7-bf67-4af3-ab38-1b566ba700d8 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780211, 'name': PowerOffVM_Task, 'duration_secs': 0.154542} completed successfully. 
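With the power-off above completed, the records that follow unregister the VM and delete its directory from the datastore. A loose sketch of that destroy sequence; "datacenter_ref" and "ds_path" are placeholders for values the driver already holds, and the function name is illustrative.

# Loose sketch of the destroy sequence (unregister, then delete the
# instance directory); "session", "vm_ref", "datacenter_ref" and
# "ds_path" (e.g. "[datastore1] <instance-uuid>") are assumed inputs.
def destroy_vm(session, vm_ref, datacenter_ref, ds_path):
    # UnregisterVM drops the VM from the vCenter inventory without
    # touching its files, and is a plain call rather than a task.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)
    file_manager = session.vim.service_content.fileManager
    # DeleteDatastoreFile_Task removes the directory (and its contents)
    # left behind on the datastore.
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager, name=ds_path,
                              datacenter=datacenter_ref)
    return session.wait_for_task(task)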
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1874.179501] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e72700e7-bf67-4af3-ab38-1b566ba700d8 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1874.182884] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e72700e7-bf67-4af3-ab38-1b566ba700d8 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1874.182884] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c84c101e-7417-47db-90e4-fea2da25dd3c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.208199] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67839afe-a56b-466d-b414-f9df04583860 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.232047] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c2e2745-56b0-4318-baeb-2325f1e65128 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.250812] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Updating instance '90f0c97d-695b-4975-8ab9-4e77a9175da1' progress to 67 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1874.268713] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e72700e7-bf67-4af3-ab38-1b566ba700d8 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1874.268713] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e72700e7-bf67-4af3-ab38-1b566ba700d8 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1874.268713] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-e72700e7-bf67-4af3-ab38-1b566ba700d8 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Deleting the datastore file [datastore1] 8078bac6-146a-4e3a-a7a7-7093f617a330 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1874.268713] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7481bfa5-96c4-4b81-9f2c-ded528a1e09e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.274402] env[63379]: DEBUG oslo_vmware.api [None req-e72700e7-bf67-4af3-ab38-1b566ba700d8 
tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1874.274402] env[63379]: value = "task-1780214" [ 1874.274402] env[63379]: _type = "Task" [ 1874.274402] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1874.282606] env[63379]: DEBUG oslo_vmware.api [None req-e72700e7-bf67-4af3-ab38-1b566ba700d8 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780214, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.524423] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] VM already powered off {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1874.524719] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Volume detach. Driver type: vmdk {{(pid=63379) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1874.524997] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369488', 'volume_id': '6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2', 'name': 'volume-6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2be6bdea-416e-4912-8930-3c4e4f194f99', 'attached_at': '', 'detached_at': '', 'volume_id': '6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2', 'serial': '6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2'} {{(pid=63379) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1874.525833] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca83c3ea-91b1-41d8-8350-dbdb8ed1a886 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.544308] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50fe64e2-a8df-4963-b2ad-ca35ab6e275f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.550817] env[63379]: WARNING nova.virt.vmwareapi.driver [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 1874.551161] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Destroying instance {{(pid=63379) destroy 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1874.551915] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1a2e6a5-518e-47c2-9a7f-80ccbaab8485 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.559255] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1874.559501] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b109bfc7-9279-415a-a1af-f68b61e6c5e8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.591393] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "88dae632-b363-4187-9198-e4300783d420" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.225s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1874.784519] env[63379]: DEBUG oslo_vmware.api [None req-e72700e7-bf67-4af3-ab38-1b566ba700d8 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780214, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.171083} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1874.784889] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-e72700e7-bf67-4af3-ab38-1b566ba700d8 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1874.785037] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e72700e7-bf67-4af3-ab38-1b566ba700d8 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1874.785311] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e72700e7-bf67-4af3-ab38-1b566ba700d8 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1874.785626] env[63379]: INFO nova.compute.manager [None req-e72700e7-bf67-4af3-ab38-1b566ba700d8 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1874.785879] env[63379]: DEBUG oslo.service.loopingcall [None req-e72700e7-bf67-4af3-ab38-1b566ba700d8 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1874.786124] env[63379]: DEBUG nova.compute.manager [-] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1874.786315] env[63379]: DEBUG nova.network.neutron [-] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1874.815198] env[63379]: DEBUG nova.network.neutron [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Port ef820562-0de4-462d-a51d-13e4a4929719 binding to destination host cpu-1 is already ACTIVE {{(pid=63379) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1874.841410] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1874.841519] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1874.841768] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Deleting the datastore file [datastore1] 2be6bdea-416e-4912-8930-3c4e4f194f99 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1874.842058] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-71bd517f-ea65-4479-8b3e-cb8e823f33eb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.851193] env[63379]: DEBUG oslo_vmware.api [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1874.851193] env[63379]: value = "task-1780216" [ 1874.851193] env[63379]: _type = "Task" [ 1874.851193] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1874.860238] env[63379]: DEBUG oslo_vmware.api [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780216, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1875.176449] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2d4cc557-1b41-4130-b732-ad7cb95e3578 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "19941838-d6b0-4fb8-9d06-f4a1b80ba428" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.230s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1875.361892] env[63379]: DEBUG oslo_vmware.api [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780216, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.148792} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1875.362903] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1875.362903] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1875.362903] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1875.382202] env[63379]: DEBUG nova.compute.manager [req-392920df-b20d-4d04-9f78-3f81e8ade1d4 req-e4a3c489-74ca-4306-be0d-dd51fbc84293 service nova] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Received event network-vif-deleted-c2313903-6e4e-42f8-be0f-3c00be1c0fec {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1875.382202] env[63379]: INFO nova.compute.manager [req-392920df-b20d-4d04-9f78-3f81e8ade1d4 req-e4a3c489-74ca-4306-be0d-dd51fbc84293 service nova] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Neutron deleted interface c2313903-6e4e-42f8-be0f-3c00be1c0fec; detaching it from the instance and deleting it from the info cache [ 1875.382202] env[63379]: DEBUG nova.network.neutron [req-392920df-b20d-4d04-9f78-3f81e8ade1d4 req-e4a3c489-74ca-4306-be0d-dd51fbc84293 service nova] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1875.471342] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d5214a29-7268-4985-96a4-b3defe33beb4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquiring lock "19941838-d6b0-4fb8-9d06-f4a1b80ba428" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1875.471741] env[63379]: DEBUG 
oslo_concurrency.lockutils [None req-d5214a29-7268-4985-96a4-b3defe33beb4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "19941838-d6b0-4fb8-9d06-f4a1b80ba428" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1875.472059] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d5214a29-7268-4985-96a4-b3defe33beb4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquiring lock "19941838-d6b0-4fb8-9d06-f4a1b80ba428-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1875.472352] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d5214a29-7268-4985-96a4-b3defe33beb4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "19941838-d6b0-4fb8-9d06-f4a1b80ba428-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1875.472632] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d5214a29-7268-4985-96a4-b3defe33beb4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "19941838-d6b0-4fb8-9d06-f4a1b80ba428-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1875.475326] env[63379]: INFO nova.compute.manager [None req-d5214a29-7268-4985-96a4-b3defe33beb4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Terminating instance [ 1875.477300] env[63379]: DEBUG nova.compute.manager [None req-d5214a29-7268-4985-96a4-b3defe33beb4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1875.477520] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-d5214a29-7268-4985-96a4-b3defe33beb4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1875.478595] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-999ceaa7-88f7-492c-9221-d80529f7eb3d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.487861] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5214a29-7268-4985-96a4-b3defe33beb4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1875.488129] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dcd862bd-1c92-4b05-ac4a-2cef05ba2658 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.497048] env[63379]: DEBUG oslo_vmware.api [None req-d5214a29-7268-4985-96a4-b3defe33beb4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 1875.497048] env[63379]: value = "task-1780217" [ 1875.497048] env[63379]: _type = "Task" [ 1875.497048] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1875.502311] env[63379]: DEBUG oslo_vmware.api [None req-d5214a29-7268-4985-96a4-b3defe33beb4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780217, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1875.764508] env[63379]: DEBUG nova.network.neutron [-] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1875.840322] env[63379]: DEBUG oslo_concurrency.lockutils [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "90f0c97d-695b-4975-8ab9-4e77a9175da1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1875.840747] env[63379]: DEBUG oslo_concurrency.lockutils [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "90f0c97d-695b-4975-8ab9-4e77a9175da1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1875.840824] env[63379]: DEBUG oslo_concurrency.lockutils [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "90f0c97d-695b-4975-8ab9-4e77a9175da1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1875.867936] env[63379]: INFO nova.virt.block_device [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Booting with volume 6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2 at /dev/sdb [ 1875.883993] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5ddc7225-7f8f-41b1-998b-c5df02dcea44 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.891364] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "88dae632-b363-4187-9198-e4300783d420" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1875.891632] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "88dae632-b363-4187-9198-e4300783d420" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1875.891843] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "88dae632-b363-4187-9198-e4300783d420-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1875.892041] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "88dae632-b363-4187-9198-e4300783d420-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1875.892223] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "88dae632-b363-4187-9198-e4300783d420-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1875.899532] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-270d6394-073f-4186-9f6b-4d477f1042b6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.911499] env[63379]: INFO nova.compute.manager [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Terminating instance [ 1875.913323] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-34c016e5-3252-43a5-9935-659305686243 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.915503] env[63379]: DEBUG nova.compute.manager [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1875.915754] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1875.916941] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffcc7817-03fb-495a-bfc1-9f4ca1e52069 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.924581] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1875.925675] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0dc5f9e5-d15c-41c1-b91a-4f3ee770533e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.937480] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a013c935-a5bb-4f18-9101-1b3aeada503c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.948024] env[63379]: DEBUG nova.compute.manager [req-392920df-b20d-4d04-9f78-3f81e8ade1d4 req-e4a3c489-74ca-4306-be0d-dd51fbc84293 service nova] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Detach interface failed, port_id=c2313903-6e4e-42f8-be0f-3c00be1c0fec, reason: Instance 8078bac6-146a-4e3a-a7a7-7093f617a330 could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 1875.953295] env[63379]: DEBUG oslo_vmware.api [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1875.953295] env[63379]: value = "task-1780218" [ 1875.953295] env[63379]: _type = "Task" [ 1875.953295] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1875.961710] env[63379]: DEBUG oslo_vmware.api [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780218, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1875.970108] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-45e9ec27-56cf-48b9-b71b-2555565c3614 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.978733] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37f912db-2357-41d6-8e9c-16ec4fd2108a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.012257] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bb2d442-34f0-4cd4-ba0b-e3d46e115614 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.015405] env[63379]: DEBUG oslo_vmware.api [None req-d5214a29-7268-4985-96a4-b3defe33beb4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780217, 'name': PowerOffVM_Task, 'duration_secs': 0.222513} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1876.015707] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5214a29-7268-4985-96a4-b3defe33beb4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1876.015930] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-d5214a29-7268-4985-96a4-b3defe33beb4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1876.017035] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aaad926b-86fc-4b78-a995-bc9b39dc081c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.021038] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff0ff8e1-c1df-4843-b858-aef1ffa71409 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.042712] env[63379]: DEBUG nova.virt.block_device [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Updating existing volume attachment record: feb7db0c-feca-4459-ac8a-abec05533f2e {{(pid=63379) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1876.107059] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-d5214a29-7268-4985-96a4-b3defe33beb4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1876.107407] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-d5214a29-7268-4985-96a4-b3defe33beb4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] 
[instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1876.107700] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5214a29-7268-4985-96a4-b3defe33beb4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Deleting the datastore file [datastore1] 19941838-d6b0-4fb8-9d06-f4a1b80ba428 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1876.108042] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-03e18595-17c7-4b60-8cc0-3e8fe6d02bb9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.115158] env[63379]: DEBUG oslo_vmware.api [None req-d5214a29-7268-4985-96a4-b3defe33beb4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 1876.115158] env[63379]: value = "task-1780220" [ 1876.115158] env[63379]: _type = "Task" [ 1876.115158] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1876.124317] env[63379]: DEBUG oslo_vmware.api [None req-d5214a29-7268-4985-96a4-b3defe33beb4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780220, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1876.267417] env[63379]: INFO nova.compute.manager [-] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Took 1.48 seconds to deallocate network for instance. [ 1876.464078] env[63379]: DEBUG oslo_vmware.api [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780218, 'name': PowerOffVM_Task, 'duration_secs': 0.194431} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1876.464402] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1876.464584] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1876.464851] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fd65cee3-0c30-4d47-a794-8013cdb9afe7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.612501] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1876.612797] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1876.613030] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Deleting the datastore file [datastore1] 88dae632-b363-4187-9198-e4300783d420 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1876.613308] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-507b752f-ebcf-4d67-a906-57f40409f27b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.620483] env[63379]: DEBUG oslo_vmware.api [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for the task: (returnval){ [ 1876.620483] env[63379]: value = "task-1780222" [ 1876.620483] env[63379]: _type = "Task" [ 1876.620483] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1876.626772] env[63379]: DEBUG oslo_vmware.api [None req-d5214a29-7268-4985-96a4-b3defe33beb4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780220, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.269895} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1876.627398] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5214a29-7268-4985-96a4-b3defe33beb4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1876.627615] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-d5214a29-7268-4985-96a4-b3defe33beb4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1876.627804] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-d5214a29-7268-4985-96a4-b3defe33beb4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1876.627983] env[63379]: INFO nova.compute.manager [None req-d5214a29-7268-4985-96a4-b3defe33beb4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1876.628242] env[63379]: DEBUG oslo.service.loopingcall [None req-d5214a29-7268-4985-96a4-b3defe33beb4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1876.628453] env[63379]: DEBUG nova.compute.manager [-] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1876.628526] env[63379]: DEBUG nova.network.neutron [-] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1876.632852] env[63379]: DEBUG oslo_vmware.api [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780222, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1876.774400] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e72700e7-bf67-4af3-ab38-1b566ba700d8 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1876.774799] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e72700e7-bf67-4af3-ab38-1b566ba700d8 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1876.775074] env[63379]: DEBUG nova.objects.instance [None req-e72700e7-bf67-4af3-ab38-1b566ba700d8 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lazy-loading 'resources' on Instance uuid 8078bac6-146a-4e3a-a7a7-7093f617a330 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1876.874969] env[63379]: DEBUG oslo_concurrency.lockutils [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "refresh_cache-90f0c97d-695b-4975-8ab9-4e77a9175da1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1876.875283] env[63379]: DEBUG oslo_concurrency.lockutils [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquired lock "refresh_cache-90f0c97d-695b-4975-8ab9-4e77a9175da1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1876.875423] env[63379]: DEBUG nova.network.neutron [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1877.131292] env[63379]: DEBUG oslo_vmware.api [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Task: {'id': task-1780222, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.198335} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1877.131593] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1877.132206] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1877.132206] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1877.132206] env[63379]: INFO nova.compute.manager [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] [instance: 88dae632-b363-4187-9198-e4300783d420] Took 1.22 seconds to destroy the instance on the hypervisor. [ 1877.132413] env[63379]: DEBUG oslo.service.loopingcall [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1877.132620] env[63379]: DEBUG nova.compute.manager [-] [instance: 88dae632-b363-4187-9198-e4300783d420] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1877.132718] env[63379]: DEBUG nova.network.neutron [-] [instance: 88dae632-b363-4187-9198-e4300783d420] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1877.409469] env[63379]: DEBUG nova.compute.manager [req-2656041e-36c3-4e8d-9329-ce06a70fb89f req-10198c16-c8aa-4ac1-9eb2-5ec559b9ba58 service nova] [instance: 88dae632-b363-4187-9198-e4300783d420] Received event network-vif-deleted-083ff06b-fbdc-4b0f-9c47-6fce99aa11ac {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1877.409761] env[63379]: INFO nova.compute.manager [req-2656041e-36c3-4e8d-9329-ce06a70fb89f req-10198c16-c8aa-4ac1-9eb2-5ec559b9ba58 service nova] [instance: 88dae632-b363-4187-9198-e4300783d420] Neutron deleted interface 083ff06b-fbdc-4b0f-9c47-6fce99aa11ac; detaching it from the instance and deleting it from the info cache [ 1877.409967] env[63379]: DEBUG nova.network.neutron [req-2656041e-36c3-4e8d-9329-ce06a70fb89f req-10198c16-c8aa-4ac1-9eb2-5ec559b9ba58 service nova] [instance: 88dae632-b363-4187-9198-e4300783d420] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1877.434387] env[63379]: DEBUG nova.compute.manager [req-e11dc686-b23a-4c80-a6d7-d10ee88135e3 req-e75bd050-1d26-40a0-b5c2-fb6da65c23fa service nova] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Received event network-vif-deleted-9d972f73-e98a-4c4d-8551-f7db527be2a7 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1877.434572] env[63379]: INFO nova.compute.manager [req-e11dc686-b23a-4c80-a6d7-d10ee88135e3 req-e75bd050-1d26-40a0-b5c2-fb6da65c23fa service nova] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Neutron deleted interface 9d972f73-e98a-4c4d-8551-f7db527be2a7; detaching it from the instance and deleting it from the info cache [ 1877.435354] env[63379]: DEBUG nova.network.neutron [req-e11dc686-b23a-4c80-a6d7-d10ee88135e3 req-e75bd050-1d26-40a0-b5c2-fb6da65c23fa service nova] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1877.451764] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4601d705-9cc2-4b66-b133-15e7fa9adad8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.460716] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ed9a0e8-e39b-4213-a25c-d673868dc666 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.494053] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58d4408f-fd04-4158-9209-b47fcfb0ce73 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.501761] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-42f126ea-d04a-4566-a18c-5a9fd29c7d15 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.515793] env[63379]: DEBUG nova.compute.provider_tree [None req-e72700e7-bf67-4af3-ab38-1b566ba700d8 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1877.654128] env[63379]: DEBUG nova.network.neutron [-] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1877.699274] env[63379]: DEBUG nova.network.neutron [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Updating instance_info_cache with network_info: [{"id": "ef820562-0de4-462d-a51d-13e4a4929719", "address": "fa:16:3e:eb:5b:7f", "network": {"id": "c67e6fb1-ba3e-4494-b459-ecd555f3bf64", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1864563188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.212", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c01c5c8c3734c4ea066324e542e7374", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6934071-bf85-4591-9c7d-55c7ea131262", "external-id": "nsx-vlan-transportzone-452", "segmentation_id": 452, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef820562-0d", "ovs_interfaceid": "ef820562-0de4-462d-a51d-13e4a4929719", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1877.882412] env[63379]: DEBUG nova.network.neutron [-] [instance: 88dae632-b363-4187-9198-e4300783d420] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1877.913540] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6dd5700f-7f85-4a0e-8376-db4c1d063c9c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.924755] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-585b7699-a089-4ca1-8c84-1924562e0418 {{(pid=63379) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.940108] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7b8e1fa3-686e-42da-aa57-313af77a083a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.949314] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d25175a-9120-42fa-87fb-513254ff4d75 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.967195] env[63379]: DEBUG nova.compute.manager [req-2656041e-36c3-4e8d-9329-ce06a70fb89f req-10198c16-c8aa-4ac1-9eb2-5ec559b9ba58 service nova] [instance: 88dae632-b363-4187-9198-e4300783d420] Detach interface failed, port_id=083ff06b-fbdc-4b0f-9c47-6fce99aa11ac, reason: Instance 88dae632-b363-4187-9198-e4300783d420 could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 1877.984777] env[63379]: DEBUG nova.compute.manager [req-e11dc686-b23a-4c80-a6d7-d10ee88135e3 req-e75bd050-1d26-40a0-b5c2-fb6da65c23fa service nova] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Detach interface failed, port_id=9d972f73-e98a-4c4d-8551-f7db527be2a7, reason: Instance 19941838-d6b0-4fb8-9d06-f4a1b80ba428 could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 1878.036746] env[63379]: ERROR nova.scheduler.client.report [None req-e72700e7-bf67-4af3-ab38-1b566ba700d8 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [req-8f327d5a-1aa4-4bec-bf28-775815b374ee] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID cf478c89-515f-4372-b90f-4868ab56e978. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-8f327d5a-1aa4-4bec-bf28-775815b374ee"}]} [ 1878.055743] env[63379]: DEBUG nova.scheduler.client.report [None req-e72700e7-bf67-4af3-ab38-1b566ba700d8 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Refreshing inventories for resource provider cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1878.069913] env[63379]: DEBUG nova.scheduler.client.report [None req-e72700e7-bf67-4af3-ab38-1b566ba700d8 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Updating ProviderTree inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1878.070218] env[63379]: DEBUG nova.compute.provider_tree [None req-e72700e7-bf67-4af3-ab38-1b566ba700d8 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1878.081482] env[63379]: DEBUG nova.scheduler.client.report [None req-e72700e7-bf67-4af3-ab38-1b566ba700d8 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Refreshing aggregate associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, aggregates: None {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1878.100149] env[63379]: DEBUG nova.scheduler.client.report [None req-e72700e7-bf67-4af3-ab38-1b566ba700d8 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Refreshing trait associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1878.156051] env[63379]: INFO nova.compute.manager [-] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Took 1.53 seconds to deallocate network for instance. 
[ 1878.171995] env[63379]: DEBUG nova.virt.hardware [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1878.172336] env[63379]: DEBUG nova.virt.hardware [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1878.172433] env[63379]: DEBUG nova.virt.hardware [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1878.173105] env[63379]: DEBUG nova.virt.hardware [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1878.173105] env[63379]: DEBUG nova.virt.hardware [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1878.173105] env[63379]: DEBUG nova.virt.hardware [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1878.173208] env[63379]: DEBUG nova.virt.hardware [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1878.173294] env[63379]: DEBUG nova.virt.hardware [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1878.173468] env[63379]: DEBUG nova.virt.hardware [None req-443791bc-f3ac-469c-aeca-b22806c15d23 
tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1878.173637] env[63379]: DEBUG nova.virt.hardware [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1878.173817] env[63379]: DEBUG nova.virt.hardware [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1878.174717] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeae3833-bf9a-46b8-ac8d-5ade9788fac2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.185528] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3148384c-d318-47d6-956f-107495a54379 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.203438] env[63379]: DEBUG oslo_concurrency.lockutils [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Releasing lock "refresh_cache-90f0c97d-695b-4975-8ab9-4e77a9175da1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1878.207229] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7f:3c:a0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '459b8c74-0aa6-42b6-996a-42b1c5d7e5c6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f7fd4937-49e3-4d89-8fed-cc6c052fc1c0', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1878.214769] env[63379]: DEBUG oslo.service.loopingcall [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1878.217973] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1878.218509] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-777c5602-d544-420b-bd76-f770a575b6ce {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.240579] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1878.240579] env[63379]: value = "task-1780223" [ 1878.240579] env[63379]: _type = "Task" [ 1878.240579] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1878.245433] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d33cd84d-8601-4b87-8124-86e564f73193 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.250931] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780223, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1878.256534] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47fc4e13-5a13-4d43-8aa4-e157dc0d0966 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.290021] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f98db79-599f-40b6-b587-4efc1a305611 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.296373] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf494502-943b-4ecc-af68-d09fccfa887d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.310328] env[63379]: DEBUG nova.compute.provider_tree [None req-e72700e7-bf67-4af3-ab38-1b566ba700d8 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1878.384886] env[63379]: INFO nova.compute.manager [-] [instance: 88dae632-b363-4187-9198-e4300783d420] Took 1.25 seconds to deallocate network for instance. 
[ 1878.663461] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d5214a29-7268-4985-96a4-b3defe33beb4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1878.731806] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f467197-4d4f-4336-a82d-7e677604491e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.754352] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f32494c8-cda3-4ea2-a121-ad0d9c61f4d1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.762578] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780223, 'name': CreateVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1878.765115] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Updating instance '90f0c97d-695b-4975-8ab9-4e77a9175da1' progress to 83 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1878.842996] env[63379]: DEBUG nova.scheduler.client.report [None req-e72700e7-bf67-4af3-ab38-1b566ba700d8 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Updated inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 with generation 136 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1878.843293] env[63379]: DEBUG nova.compute.provider_tree [None req-e72700e7-bf67-4af3-ab38-1b566ba700d8 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Updating resource provider cf478c89-515f-4372-b90f-4868ab56e978 generation from 136 to 137 during operation: update_inventory {{(pid=63379) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1878.843481] env[63379]: DEBUG nova.compute.provider_tree [None req-e72700e7-bf67-4af3-ab38-1b566ba700d8 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1878.892219] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1879.261160] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780223, 'name': CreateVM_Task, 'duration_secs': 0.598935} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1879.261329] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1879.262042] env[63379]: DEBUG oslo_concurrency.lockutils [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1879.262205] env[63379]: DEBUG oslo_concurrency.lockutils [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1879.262584] env[63379]: DEBUG oslo_concurrency.lockutils [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1879.262852] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe41fac3-cf63-40c4-bb53-cd7694892d2f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.267569] env[63379]: DEBUG oslo_vmware.api [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1879.267569] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]524851b3-281c-4c28-05c2-6519f8d2ed97" [ 1879.267569] env[63379]: _type = "Task" [ 1879.267569] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1879.272418] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1879.275742] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0a6701f1-a565-4e3a-a4b0-b56d873be870 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.277285] env[63379]: DEBUG oslo_vmware.api [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]524851b3-281c-4c28-05c2-6519f8d2ed97, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1879.283124] env[63379]: DEBUG oslo_vmware.api [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1879.283124] env[63379]: value = "task-1780224" [ 1879.283124] env[63379]: _type = "Task" [ 1879.283124] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1879.296062] env[63379]: DEBUG oslo_vmware.api [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780224, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1879.349207] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e72700e7-bf67-4af3-ab38-1b566ba700d8 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.574s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1879.351535] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d5214a29-7268-4985-96a4-b3defe33beb4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.688s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1879.351782] env[63379]: DEBUG nova.objects.instance [None req-d5214a29-7268-4985-96a4-b3defe33beb4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lazy-loading 'resources' on Instance uuid 19941838-d6b0-4fb8-9d06-f4a1b80ba428 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1879.371053] env[63379]: INFO nova.scheduler.client.report [None req-e72700e7-bf67-4af3-ab38-1b566ba700d8 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Deleted allocations for instance 8078bac6-146a-4e3a-a7a7-7093f617a330 [ 1879.777810] env[63379]: DEBUG oslo_vmware.api [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]524851b3-281c-4c28-05c2-6519f8d2ed97, 'name': SearchDatastore_Task, 'duration_secs': 0.013408} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1879.778218] env[63379]: DEBUG oslo_concurrency.lockutils [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1879.778507] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1879.778793] env[63379]: DEBUG oslo_concurrency.lockutils [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1879.778979] env[63379]: DEBUG oslo_concurrency.lockutils [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1879.779253] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1879.779547] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-828402fb-5ee4-406b-9cf8-73b26225d79f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.792274] env[63379]: DEBUG oslo_vmware.api [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780224, 'name': PowerOnVM_Task, 'duration_secs': 0.477556} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1879.793089] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1879.793296] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-84caf45d-21ed-4824-8a07-e866ada60d8d tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Updating instance '90f0c97d-695b-4975-8ab9-4e77a9175da1' progress to 100 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1879.796960] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1879.798060] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1879.799010] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3aef2699-2656-42f4-bcf2-3e960fc5fbee {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.805135] env[63379]: DEBUG oslo_vmware.api [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1879.805135] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52634148-4db6-95bd-d243-5258f17880a2" [ 1879.805135] env[63379]: _type = "Task" [ 1879.805135] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1879.814681] env[63379]: DEBUG oslo_vmware.api [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52634148-4db6-95bd-d243-5258f17880a2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1879.878408] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e72700e7-bf67-4af3-ab38-1b566ba700d8 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "8078bac6-146a-4e3a-a7a7-7093f617a330" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.239s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1879.960964] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f3813cc-ba54-460b-a432-ae08227f1220 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.969046] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6ac84cc-d4aa-4939-aa4a-9ecaee8de698 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.998408] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a3f8efd-a385-48e1-b371-074ca1a5ce91 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.006428] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da39f876-8a41-4669-aa42-8ec1f134afba {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.021427] env[63379]: DEBUG nova.compute.provider_tree [None req-d5214a29-7268-4985-96a4-b3defe33beb4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1880.315936] env[63379]: DEBUG oslo_vmware.api [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52634148-4db6-95bd-d243-5258f17880a2, 'name': SearchDatastore_Task, 'duration_secs': 0.010535} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1880.317450] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6646cd13-6e0b-46bd-bf05-c2e8d9f57e71 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.323686] env[63379]: DEBUG oslo_vmware.api [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1880.323686] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52aa339f-306c-b853-cc11-d37aa122792c" [ 1880.323686] env[63379]: _type = "Task" [ 1880.323686] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1880.331976] env[63379]: DEBUG oslo_vmware.api [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52aa339f-306c-b853-cc11-d37aa122792c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.525370] env[63379]: DEBUG nova.scheduler.client.report [None req-d5214a29-7268-4985-96a4-b3defe33beb4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1880.815271] env[63379]: DEBUG oslo_vmware.rw_handles [None req-527fba4a-8b14-43b1-8017-bc5aa0c168f3 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525c2153-4dd6-ed80-2af6-12db1d75cf99/disk-0.vmdk. {{(pid=63379) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1880.816248] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0796cad-8a23-4e38-b7ef-bb8259a1b2db {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.822615] env[63379]: DEBUG oslo_vmware.rw_handles [None req-527fba4a-8b14-43b1-8017-bc5aa0c168f3 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525c2153-4dd6-ed80-2af6-12db1d75cf99/disk-0.vmdk is in state: ready. {{(pid=63379) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1880.824412] env[63379]: ERROR oslo_vmware.rw_handles [None req-527fba4a-8b14-43b1-8017-bc5aa0c168f3 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525c2153-4dd6-ed80-2af6-12db1d75cf99/disk-0.vmdk due to incomplete transfer. [ 1880.824412] env[63379]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-f02f4946-a29b-48f0-bf13-1633e11d6578 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.833703] env[63379]: DEBUG oslo_vmware.api [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52aa339f-306c-b853-cc11-d37aa122792c, 'name': SearchDatastore_Task, 'duration_secs': 0.011498} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1880.834750] env[63379]: DEBUG oslo_concurrency.lockutils [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1880.835033] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 2be6bdea-416e-4912-8930-3c4e4f194f99/2be6bdea-416e-4912-8930-3c4e4f194f99.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1880.835302] env[63379]: DEBUG oslo_vmware.rw_handles [None req-527fba4a-8b14-43b1-8017-bc5aa0c168f3 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525c2153-4dd6-ed80-2af6-12db1d75cf99/disk-0.vmdk. {{(pid=63379) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1880.835486] env[63379]: DEBUG nova.virt.vmwareapi.images [None req-527fba4a-8b14-43b1-8017-bc5aa0c168f3 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Uploaded image a8193459-640b-4a3b-9ea5-7376bbbafd60 to the Glance image server {{(pid=63379) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1880.837817] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-527fba4a-8b14-43b1-8017-bc5aa0c168f3 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Destroying the VM {{(pid=63379) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1880.838085] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-402fd4ad-4850-4d1d-b37b-b6a7a9259667 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.839869] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-b8f7d6b2-820a-4881-8af8-3711c098cdc8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.845977] env[63379]: DEBUG oslo_vmware.api [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1880.845977] env[63379]: value = "task-1780225" [ 1880.845977] env[63379]: _type = "Task" [ 1880.845977] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1880.846888] env[63379]: DEBUG oslo_vmware.api [None req-527fba4a-8b14-43b1-8017-bc5aa0c168f3 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 1880.846888] env[63379]: value = "task-1780226" [ 1880.846888] env[63379]: _type = "Task" [ 1880.846888] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1880.859035] env[63379]: DEBUG oslo_vmware.api [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780225, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.862230] env[63379]: DEBUG oslo_vmware.api [None req-527fba4a-8b14-43b1-8017-bc5aa0c168f3 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780226, 'name': Destroy_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.031593] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d5214a29-7268-4985-96a4-b3defe33beb4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.680s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1881.034668] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.143s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1881.035425] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1881.056378] env[63379]: INFO nova.scheduler.client.report [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Deleted allocations for instance 88dae632-b363-4187-9198-e4300783d420 [ 1881.058191] env[63379]: INFO nova.scheduler.client.report [None req-d5214a29-7268-4985-96a4-b3defe33beb4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Deleted allocations for instance 19941838-d6b0-4fb8-9d06-f4a1b80ba428 [ 1881.365028] env[63379]: DEBUG oslo_vmware.api [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780225, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.365028] env[63379]: DEBUG oslo_vmware.api [None req-527fba4a-8b14-43b1-8017-bc5aa0c168f3 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780226, 'name': Destroy_Task, 'duration_secs': 0.374044} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1881.365028] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-527fba4a-8b14-43b1-8017-bc5aa0c168f3 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Destroyed the VM [ 1881.365028] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-527fba4a-8b14-43b1-8017-bc5aa0c168f3 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Deleting Snapshot of the VM instance {{(pid=63379) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1881.365028] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-01e37d83-e7e8-4d0f-89d4-d5697c12254d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.376022] env[63379]: DEBUG oslo_vmware.api [None req-527fba4a-8b14-43b1-8017-bc5aa0c168f3 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 1881.376022] env[63379]: value = "task-1780227" [ 1881.376022] env[63379]: _type = "Task" [ 1881.376022] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1881.382581] env[63379]: DEBUG oslo_vmware.api [None req-527fba4a-8b14-43b1-8017-bc5aa0c168f3 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780227, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.569455] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ff20dc49-9099-4f18-b17a-a34485e380b6 tempest-DeleteServersTestJSON-2143897756 tempest-DeleteServersTestJSON-2143897756-project-member] Lock "88dae632-b363-4187-9198-e4300783d420" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.678s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1881.570623] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d5214a29-7268-4985-96a4-b3defe33beb4 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "19941838-d6b0-4fb8-9d06-f4a1b80ba428" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.099s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1881.574228] env[63379]: DEBUG oslo_concurrency.lockutils [None req-315c5d38-3397-48b7-bd75-842ec6e856c0 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "90f0c97d-695b-4975-8ab9-4e77a9175da1" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1881.575202] env[63379]: DEBUG oslo_concurrency.lockutils [None req-315c5d38-3397-48b7-bd75-842ec6e856c0 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "90f0c97d-695b-4975-8ab9-4e77a9175da1" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1881.575679] env[63379]: DEBUG nova.compute.manager [None req-315c5d38-3397-48b7-bd75-842ec6e856c0 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Going to confirm migration 6 {{(pid=63379) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 1881.858158] env[63379]: DEBUG oslo_vmware.api [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780225, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.678453} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1881.858571] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 2be6bdea-416e-4912-8930-3c4e4f194f99/2be6bdea-416e-4912-8930-3c4e4f194f99.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1881.858880] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1881.859224] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0d32feac-b7e9-4036-8a4b-c1bb4aa85652 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.866705] env[63379]: DEBUG oslo_vmware.api [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1881.866705] env[63379]: value = "task-1780228" [ 1881.866705] env[63379]: _type = "Task" [ 1881.866705] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1881.875077] env[63379]: DEBUG oslo_vmware.api [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780228, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.884081] env[63379]: DEBUG oslo_vmware.api [None req-527fba4a-8b14-43b1-8017-bc5aa0c168f3 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780227, 'name': RemoveSnapshot_Task} progress is 65%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1882.161399] env[63379]: DEBUG oslo_concurrency.lockutils [None req-315c5d38-3397-48b7-bd75-842ec6e856c0 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "refresh_cache-90f0c97d-695b-4975-8ab9-4e77a9175da1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1882.161613] env[63379]: DEBUG oslo_concurrency.lockutils [None req-315c5d38-3397-48b7-bd75-842ec6e856c0 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquired lock "refresh_cache-90f0c97d-695b-4975-8ab9-4e77a9175da1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1882.162891] env[63379]: DEBUG nova.network.neutron [None req-315c5d38-3397-48b7-bd75-842ec6e856c0 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1882.163254] env[63379]: DEBUG nova.objects.instance [None req-315c5d38-3397-48b7-bd75-842ec6e856c0 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lazy-loading 'info_cache' on Instance uuid 90f0c97d-695b-4975-8ab9-4e77a9175da1 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1882.378026] env[63379]: DEBUG oslo_vmware.api [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780228, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070487} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1882.381389] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1882.382262] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49132f2c-4235-47ef-a1c6-1a62a164486b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.389618] env[63379]: DEBUG oslo_vmware.api [None req-527fba4a-8b14-43b1-8017-bc5aa0c168f3 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780227, 'name': RemoveSnapshot_Task, 'duration_secs': 0.652553} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1882.398590] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-527fba4a-8b14-43b1-8017-bc5aa0c168f3 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Deleted Snapshot of the VM instance {{(pid=63379) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1882.398845] env[63379]: INFO nova.compute.manager [None req-527fba4a-8b14-43b1-8017-bc5aa0c168f3 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Took 12.91 seconds to snapshot the instance on the hypervisor. [ 1882.410266] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] 2be6bdea-416e-4912-8930-3c4e4f194f99/2be6bdea-416e-4912-8930-3c4e4f194f99.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1882.410266] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3c72ec9b-54e2-4900-9ed5-6ebda2541cf3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.430024] env[63379]: DEBUG oslo_vmware.api [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1882.430024] env[63379]: value = "task-1780230" [ 1882.430024] env[63379]: _type = "Task" [ 1882.430024] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1882.438285] env[63379]: DEBUG oslo_vmware.api [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780230, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1882.533827] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "48c17c3b-1197-46cb-a0f7-3671b2d82c7e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1882.535282] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "48c17c3b-1197-46cb-a0f7-3671b2d82c7e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1882.940351] env[63379]: DEBUG oslo_vmware.api [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780230, 'name': ReconfigVM_Task, 'duration_secs': 0.253023} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1882.940661] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Reconfigured VM instance instance-00000061 to attach disk [datastore1] 2be6bdea-416e-4912-8930-3c4e4f194f99/2be6bdea-416e-4912-8930-3c4e4f194f99.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1882.942869] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'encryption_format': None, 'guest_format': None, 'device_type': 'disk', 'encryption_secret_uuid': None, 'boot_index': 0, 'encrypted': False, 'size': 0, 'device_name': '/dev/sda', 'disk_bus': None, 'encryption_options': None, 'image_id': 'd3d2d67c-c3e3-4e1e-9156-0c896c5b3d48'}], 'ephemerals': [], 'block_device_mapping': [{'guest_format': None, 'device_type': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369488', 'volume_id': '6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2', 'name': 'volume-6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2be6bdea-416e-4912-8930-3c4e4f194f99', 'attached_at': '', 'detached_at': '', 'volume_id': '6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2', 'serial': '6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2'}, 'attachment_id': 'feb7db0c-feca-4459-ac8a-abec05533f2e', 'boot_index': None, 'mount_device': '/dev/sdb', 'disk_bus': None, 'delete_on_termination': False, 'volume_type': None}], 'swap': None} {{(pid=63379) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1882.942869] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None 
req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Volume attach. Driver type: vmdk {{(pid=63379) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1882.942869] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369488', 'volume_id': '6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2', 'name': 'volume-6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2be6bdea-416e-4912-8930-3c4e4f194f99', 'attached_at': '', 'detached_at': '', 'volume_id': '6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2', 'serial': '6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2'} {{(pid=63379) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1882.945208] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72091979-d5b6-4443-8d45-a5ff20132fb6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.963509] env[63379]: DEBUG nova.compute.manager [None req-527fba4a-8b14-43b1-8017-bc5aa0c168f3 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Found 3 images (rotation: 2) {{(pid=63379) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4555}} [ 1882.963740] env[63379]: DEBUG nova.compute.manager [None req-527fba4a-8b14-43b1-8017-bc5aa0c168f3 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Rotating out 1 backups {{(pid=63379) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4563}} [ 1882.964055] env[63379]: DEBUG nova.compute.manager [None req-527fba4a-8b14-43b1-8017-bc5aa0c168f3 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Deleting image b70147f5-2309-47ec-8e13-4ad453606361 {{(pid=63379) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4568}} [ 1882.966733] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdc6507d-cca2-4657-90c8-4403466227ac {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.992222] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] volume-6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2/volume-6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1882.992888] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b3ae25d8-fe25-444b-bbd8-943da765805d {{(pid=63379) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.011075] env[63379]: DEBUG oslo_vmware.api [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1883.011075] env[63379]: value = "task-1780231" [ 1883.011075] env[63379]: _type = "Task" [ 1883.011075] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1883.018808] env[63379]: DEBUG oslo_vmware.api [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780231, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.037629] env[63379]: DEBUG nova.compute.manager [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1883.395880] env[63379]: DEBUG nova.network.neutron [None req-315c5d38-3397-48b7-bd75-842ec6e856c0 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Updating instance_info_cache with network_info: [{"id": "ef820562-0de4-462d-a51d-13e4a4929719", "address": "fa:16:3e:eb:5b:7f", "network": {"id": "c67e6fb1-ba3e-4494-b459-ecd555f3bf64", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1864563188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.212", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c01c5c8c3734c4ea066324e542e7374", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6934071-bf85-4591-9c7d-55c7ea131262", "external-id": "nsx-vlan-transportzone-452", "segmentation_id": 452, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef820562-0d", "ovs_interfaceid": "ef820562-0de4-462d-a51d-13e4a4929719", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1883.520515] env[63379]: DEBUG oslo_vmware.api [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780231, 'name': ReconfigVM_Task, 'duration_secs': 0.275128} completed successfully. 
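The block device information and vmdk connection_info dumped in the entries above are plain Python dicts. As a minimal, illustrative sketch (the literal below is abridged from the log and the helper name is made up, not Nova code), the attachment details can be pulled out of a structure shaped like that one as follows:

# Abridged copy of the structure logged above; only the keys used below are kept.
block_device_info = {
    'root_device_name': '/dev/sda',
    'block_device_mapping': [{
        'connection_info': {
            'driver_volume_type': 'vmdk',
            'data': {'volume': 'vm-369488',
                     'volume_id': '6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2',
                     'access_mode': 'rw'},
        },
        'mount_device': '/dev/sdb',
        'delete_on_termination': False,
    }],
    'swap': None,
}

def iter_vmdk_attachments(bdi):
    # Yield (volume_id, backing_vm, mount_device) for every vmdk mapping.
    for bdm in bdi.get('block_device_mapping') or []:
        conn = bdm['connection_info']
        if conn['driver_volume_type'] == 'vmdk':
            yield (conn['data']['volume_id'],
                   conn['data']['volume'],
                   bdm['mount_device'])

for vol_id, backing, mount in iter_vmdk_attachments(block_device_info):
    print('volume %s (backing %s) -> %s' % (vol_id, backing, mount))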
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1883.520799] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Reconfigured VM instance instance-00000061 to attach disk [datastore1] volume-6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2/volume-6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1883.525464] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-af861191-1325-4494-a932-28960f777579 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.539747] env[63379]: DEBUG oslo_vmware.api [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1883.539747] env[63379]: value = "task-1780232" [ 1883.539747] env[63379]: _type = "Task" [ 1883.539747] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1883.547903] env[63379]: DEBUG oslo_vmware.api [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780232, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.559873] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1883.560148] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1883.561705] env[63379]: INFO nova.compute.claims [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1883.664993] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquiring lock "1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1883.664993] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 
tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1883.898784] env[63379]: DEBUG oslo_concurrency.lockutils [None req-315c5d38-3397-48b7-bd75-842ec6e856c0 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Releasing lock "refresh_cache-90f0c97d-695b-4975-8ab9-4e77a9175da1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1883.899094] env[63379]: DEBUG nova.objects.instance [None req-315c5d38-3397-48b7-bd75-842ec6e856c0 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lazy-loading 'migration_context' on Instance uuid 90f0c97d-695b-4975-8ab9-4e77a9175da1 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1884.049355] env[63379]: DEBUG oslo_vmware.api [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780232, 'name': ReconfigVM_Task, 'duration_secs': 0.144802} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1884.050027] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369488', 'volume_id': '6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2', 'name': 'volume-6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2be6bdea-416e-4912-8930-3c4e4f194f99', 'attached_at': '', 'detached_at': '', 'volume_id': '6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2', 'serial': '6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2'} {{(pid=63379) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1884.050328] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4ca568b7-95bb-4ebd-aca3-70bbd72544cc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.057082] env[63379]: DEBUG oslo_vmware.api [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1884.057082] env[63379]: value = "task-1780233" [ 1884.057082] env[63379]: _type = "Task" [ 1884.057082] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1884.067148] env[63379]: DEBUG oslo_vmware.api [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780233, 'name': Rename_Task} progress is 0%. 
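A few entries earlier the periodic backup rotation reported "Found 3 images (rotation: 2)" and then deleted one image. The arithmetic behind that decision is simple; the following is a simplified stand-alone sketch, not the actual _rotate_backups code, and the image data is hypothetical:

def rotate_backups(images, rotation):
    # images: backup image dicts with 'id' and 'created_at'; keep the newest
    # `rotation` of them and return the ids that should be deleted.
    newest_first = sorted(images, key=lambda img: img['created_at'], reverse=True)
    doomed = newest_first[rotation:]
    print('Found %d images (rotation: %d)' % (len(newest_first), rotation))
    print('Rotating out %d backups' % len(doomed))
    return [img['id'] for img in doomed]

# Three backups exist and the request asked to keep two, so the oldest goes.
for image_id in rotate_backups(
        [{'id': 'backup-c', 'created_at': 3},
         {'id': 'backup-b', 'created_at': 2},
         {'id': 'backup-a', 'created_at': 1}], rotation=2):
    print('Deleting image %s' % image_id)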
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.166765] env[63379]: DEBUG nova.compute.manager [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1884.402363] env[63379]: DEBUG nova.objects.base [None req-315c5d38-3397-48b7-bd75-842ec6e856c0 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Object Instance<90f0c97d-695b-4975-8ab9-4e77a9175da1> lazy-loaded attributes: info_cache,migration_context {{(pid=63379) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1884.403371] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05d6042f-5342-48cc-b238-7bd9008baa23 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.423912] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de526378-3d5a-4993-ac0e-859be75967c8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.430600] env[63379]: DEBUG oslo_vmware.api [None req-315c5d38-3397-48b7-bd75-842ec6e856c0 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1884.430600] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52438743-a5a3-c65f-20c7-6e761dc8f91c" [ 1884.430600] env[63379]: _type = "Task" [ 1884.430600] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1884.440254] env[63379]: DEBUG oslo_vmware.api [None req-315c5d38-3397-48b7-bd75-842ec6e856c0 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52438743-a5a3-c65f-20c7-6e761dc8f91c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.567034] env[63379]: DEBUG oslo_vmware.api [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780233, 'name': Rename_Task, 'duration_secs': 0.148015} completed successfully. 
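The Rename_Task that just completed and the PowerOnVM_Task that follows both use the same oslo.vmware pattern: invoke the vCenter task, then poll it until completion (the "progress is N%" lines). A minimal sketch of that pattern with the public oslo.vmware API; `session` and `vm_ref` are assumed to already exist, and `power_on_vm` is only an illustrative name:

from oslo_vmware import exceptions as vexc

def power_on_vm(session, vm_ref):
    # invoke_api sends the SOAP call and returns a task managed-object ref.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    try:
        # wait_for_task polls the task periodically (the "progress is N%"
        # log lines) and returns the task info once vCenter reports success.
        return session.wait_for_task(task)
    except vexc.VimFaultException:
        # The task failed on the vCenter side; re-raise for the caller.
        raise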
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1884.567034] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1884.567034] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-45fdeabd-564e-43b4-ad10-d8015e749f3e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.575806] env[63379]: DEBUG oslo_vmware.api [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1884.575806] env[63379]: value = "task-1780234" [ 1884.575806] env[63379]: _type = "Task" [ 1884.575806] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1884.583540] env[63379]: DEBUG oslo_vmware.api [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780234, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.674590] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2f2aeed-a940-42f2-95fa-e5388875d034 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.684594] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f45733a-7ac1-445e-95cc-b952c7ab1172 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.688697] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1884.717782] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-582b3178-f2dd-4c2d-9806-dd35bb2c4bbf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.725856] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24bb8704-374b-41b5-913a-14d942f1e05b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.739404] env[63379]: DEBUG nova.compute.provider_tree [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1884.941192] env[63379]: DEBUG oslo_vmware.api [None req-315c5d38-3397-48b7-bd75-842ec6e856c0 
tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52438743-a5a3-c65f-20c7-6e761dc8f91c, 'name': SearchDatastore_Task, 'duration_secs': 0.008118} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1884.941505] env[63379]: DEBUG oslo_concurrency.lockutils [None req-315c5d38-3397-48b7-bd75-842ec6e856c0 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1885.086285] env[63379]: DEBUG oslo_vmware.api [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780234, 'name': PowerOnVM_Task, 'duration_secs': 0.425162} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1885.086837] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1885.086837] env[63379]: DEBUG nova.compute.manager [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1885.087659] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc9afc2a-8e9a-4777-a6fe-634a0360ae42 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.243583] env[63379]: DEBUG nova.scheduler.client.report [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1885.269032] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c4e505b6-ba88-410a-b59d-b7ca230795e8 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "4b419aa8-d4da-45fd-a6da-6f05ee851f2f" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1885.269032] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c4e505b6-ba88-410a-b59d-b7ca230795e8 tempest-ServerActionsTestOtherB-1503948534 
tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "4b419aa8-d4da-45fd-a6da-6f05ee851f2f" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1885.269032] env[63379]: DEBUG nova.compute.manager [None req-c4e505b6-ba88-410a-b59d-b7ca230795e8 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1885.269376] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d68fd319-9bb3-4d8d-8089-40c4bc7b535e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.276218] env[63379]: DEBUG nova.compute.manager [None req-c4e505b6-ba88-410a-b59d-b7ca230795e8 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63379) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1885.276768] env[63379]: DEBUG nova.objects.instance [None req-c4e505b6-ba88-410a-b59d-b7ca230795e8 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lazy-loading 'flavor' on Instance uuid 4b419aa8-d4da-45fd-a6da-6f05ee851f2f {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1885.603545] env[63379]: DEBUG oslo_concurrency.lockutils [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1885.750393] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.188s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1885.750393] env[63379]: DEBUG nova.compute.manager [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Start building networks asynchronously for instance. 
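The "Acquiring lock … / acquired … waited N s / released … held N s" triples throughout this log, such as the compute_resources claim released above after 2.188s, come from oslo.concurrency's lock helpers. A minimal sketch producing the same pattern with the public API; the lock name and function here are made up for illustration:

import time

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def claim_resources():
    # Time spent in here is what shows up as the "held N.NNNs" figure when
    # the lock is released.
    time.sleep(0.1)

claim_resources()

# The same thing written with the context-manager form.
with lockutils.lock('compute_resources'):
    time.sleep(0.1)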
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1885.752456] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.063s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1885.753531] env[63379]: INFO nova.compute.claims [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1885.780938] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4e505b6-ba88-410a-b59d-b7ca230795e8 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1885.781311] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f19d5229-55d9-42ee-b486-c608505fc772 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.792277] env[63379]: DEBUG oslo_vmware.api [None req-c4e505b6-ba88-410a-b59d-b7ca230795e8 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 1885.792277] env[63379]: value = "task-1780235" [ 1885.792277] env[63379]: _type = "Task" [ 1885.792277] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1885.809117] env[63379]: DEBUG oslo_vmware.api [None req-c4e505b6-ba88-410a-b59d-b7ca230795e8 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780235, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.257860] env[63379]: DEBUG nova.compute.utils [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1886.259392] env[63379]: DEBUG nova.compute.manager [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1886.259562] env[63379]: DEBUG nova.network.neutron [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1886.302334] env[63379]: DEBUG oslo_vmware.api [None req-c4e505b6-ba88-410a-b59d-b7ca230795e8 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780235, 'name': PowerOffVM_Task, 'duration_secs': 0.237279} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1886.302478] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4e505b6-ba88-410a-b59d-b7ca230795e8 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1886.302645] env[63379]: DEBUG nova.compute.manager [None req-c4e505b6-ba88-410a-b59d-b7ca230795e8 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1886.303610] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9a665c7-a5b6-4b6a-b36f-1e8e53fc4d36 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.531130] env[63379]: DEBUG nova.policy [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5cbf26808a73470898829b58491e7c6f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'edb0d4b37a67492f9e0275b341e80cc2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1886.762952] env[63379]: DEBUG nova.compute.manager [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Start building block device mappings for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1886.817608] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c4e505b6-ba88-410a-b59d-b7ca230795e8 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "4b419aa8-d4da-45fd-a6da-6f05ee851f2f" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.549s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1886.899664] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad1b0625-8cbe-4d66-8c95-3ba5ac1fae28 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.908692] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a4a9eea-577e-4fa8-bb37-6592f20ddc25 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.943808] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e51ff0e-7bbf-4e31-b9e7-d07531ba02cf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.952139] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed82d00d-0989-4799-ba86-9ea159c310d8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.968086] env[63379]: DEBUG nova.compute.provider_tree [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1887.002430] env[63379]: DEBUG nova.network.neutron [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Successfully created port: 4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1887.471022] env[63379]: DEBUG nova.scheduler.client.report [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1887.776284] env[63379]: DEBUG nova.compute.manager [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Start spawning the instance on the hypervisor. 
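The inventory dict reported above for provider cf478c89-515f-4372-b90f-4868ab56e978 is what the capacity math runs on: usable capacity per resource class is (total - reserved) * allocation_ratio, the standard Placement formula. A small sketch of that arithmetic over the values from the log (the helper name is illustrative):

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

def capacity(inv):
    # Placement-style usable capacity: (total - reserved) * allocation_ratio.
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inv.items()}

print(capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}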
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1887.805710] env[63379]: DEBUG nova.virt.hardware [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1887.805710] env[63379]: DEBUG nova.virt.hardware [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1887.805908] env[63379]: DEBUG nova.virt.hardware [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1887.806248] env[63379]: DEBUG nova.virt.hardware [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1887.806450] env[63379]: DEBUG nova.virt.hardware [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1887.806615] env[63379]: DEBUG nova.virt.hardware [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1887.806832] env[63379]: DEBUG nova.virt.hardware [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1887.807029] env[63379]: DEBUG nova.virt.hardware [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1887.807194] env[63379]: DEBUG 
nova.virt.hardware [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1887.807439] env[63379]: DEBUG nova.virt.hardware [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1887.807589] env[63379]: DEBUG nova.virt.hardware [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1887.808763] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e060a895-71f6-421e-8b07-70275d18da89 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.817644] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feaecd20-93f1-470f-aaf8-7253f1c36228 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.976646] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.224s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1887.977709] env[63379]: DEBUG nova.compute.manager [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1887.981576] env[63379]: DEBUG oslo_concurrency.lockutils [None req-315c5d38-3397-48b7-bd75-842ec6e856c0 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 3.040s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1888.485405] env[63379]: DEBUG nova.compute.utils [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1888.490267] env[63379]: DEBUG nova.compute.manager [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Allocating IP information in the background. 
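The nova.virt.hardware entries above walk from "Build topologies for 1 vcpu(s) 1:1:1" to a single VirtCPUTopology(cores=1,sockets=1,threads=1). The underlying idea is to enumerate every (sockets, cores, threads) factorisation of the vCPU count that fits the limits; a simplified stand-alone sketch of that enumeration (illustrative only, not Nova's actual implementation):

import itertools

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Every (sockets, cores, threads) triple whose product equals the vCPU
    # count and which respects the per-dimension limits.
    found = []
    for sockets, cores, threads in itertools.product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1)):
        if sockets * cores * threads == vcpus:
            found.append((sockets, cores, threads))
    return found

# For the 1-vCPU m1.nano flavor in the log there is exactly one option.
print(possible_topologies(1))   # [(1, 1, 1)]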
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1888.490388] env[63379]: DEBUG nova.network.neutron [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1888.519620] env[63379]: DEBUG nova.compute.manager [req-d72cbdca-ba1e-4716-af74-6a9404311bdd req-9401c0c2-31a3-480d-8e71-d3b35ffe33e2 service nova] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Received event network-vif-plugged-4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1888.520305] env[63379]: DEBUG oslo_concurrency.lockutils [req-d72cbdca-ba1e-4716-af74-6a9404311bdd req-9401c0c2-31a3-480d-8e71-d3b35ffe33e2 service nova] Acquiring lock "48c17c3b-1197-46cb-a0f7-3671b2d82c7e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1888.520305] env[63379]: DEBUG oslo_concurrency.lockutils [req-d72cbdca-ba1e-4716-af74-6a9404311bdd req-9401c0c2-31a3-480d-8e71-d3b35ffe33e2 service nova] Lock "48c17c3b-1197-46cb-a0f7-3671b2d82c7e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1888.520439] env[63379]: DEBUG oslo_concurrency.lockutils [req-d72cbdca-ba1e-4716-af74-6a9404311bdd req-9401c0c2-31a3-480d-8e71-d3b35ffe33e2 service nova] Lock "48c17c3b-1197-46cb-a0f7-3671b2d82c7e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1888.520543] env[63379]: DEBUG nova.compute.manager [req-d72cbdca-ba1e-4716-af74-6a9404311bdd req-9401c0c2-31a3-480d-8e71-d3b35ffe33e2 service nova] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] No waiting events found dispatching network-vif-plugged-4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1888.520670] env[63379]: WARNING nova.compute.manager [req-d72cbdca-ba1e-4716-af74-6a9404311bdd req-9401c0c2-31a3-480d-8e71-d3b35ffe33e2 service nova] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Received unexpected event network-vif-plugged-4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191 for instance with vm_state building and task_state spawning. 
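The network-vif-plugged handling above ("No waiting events found dispatching …" followed by the "Received unexpected event" warning) is a waiter-registry pattern: a spawning thread registers interest in an event name, and the external-event handler pops the matching waiter if one exists. A heavily simplified, thread-based sketch of that idea (illustrative only, not Nova's InstanceEvents code):

import threading

class InstanceEventWaiters:
    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}   # (instance_uuid, event_name) -> threading.Event

    def prepare(self, instance_uuid, event_name):
        # Called by the thread that will later wait for the event.
        ev = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = ev
        return ev

    def dispatch(self, instance_uuid, event_name):
        # Called when an external event (e.g. network-vif-plugged) arrives.
        with self._lock:
            ev = self._waiters.pop((instance_uuid, event_name), None)
        if ev is None:
            print('Received unexpected event %s for instance %s'
                  % (event_name, instance_uuid))
        else:
            ev.set()

waiters = InstanceEventWaiters()
# No waiter registered yet, so this mirrors the WARNING in the log.
waiters.dispatch('48c17c3b-1197-46cb-a0f7-3671b2d82c7e',
                 'network-vif-plugged-4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191')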
[ 1888.531249] env[63379]: DEBUG nova.policy [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4daab3ae5955497a9d25b4ef59118d0e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ba1a1cf17f9941b299a6102689835f88', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1888.626570] env[63379]: DEBUG nova.network.neutron [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Successfully updated port: 4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1888.640780] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b207ad2-aab4-4001-8279-ca2fef558fbd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.655844] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f625283a-ecbc-47a9-9911-e99c0c69bc15 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.710818] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74e52b7a-a7a3-4381-9e3f-5385eb45fd24 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.715267] env[63379]: DEBUG nova.compute.manager [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Stashing vm_state: stopped {{(pid=63379) _prep_resize /opt/stack/nova/nova/compute/manager.py:5671}} [ 1888.729821] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c4ebec3-be75-48e4-a0d6-da18e0b7ab9f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.746866] env[63379]: DEBUG nova.compute.provider_tree [None req-315c5d38-3397-48b7-bd75-842ec6e856c0 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1888.851987] env[63379]: DEBUG nova.network.neutron [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Successfully created port: 3a859294-da1a-435c-aa5c-a1ec72c124c2 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1888.966482] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 
tempest-ServerShowV247Test-1573948792-project-member] Acquiring lock "b3f753e3-2ec6-4359-8de0-f9c771e274e5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1888.966728] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Lock "b3f753e3-2ec6-4359-8de0-f9c771e274e5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1888.990579] env[63379]: DEBUG nova.compute.manager [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1889.129872] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "refresh_cache-48c17c3b-1197-46cb-a0f7-3671b2d82c7e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1889.130488] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquired lock "refresh_cache-48c17c3b-1197-46cb-a0f7-3671b2d82c7e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1889.130488] env[63379]: DEBUG nova.network.neutron [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1889.211226] env[63379]: DEBUG oslo_concurrency.lockutils [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Acquiring lock "14adcb7b-b754-407e-9a99-28a1ca2ede68" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1889.211515] env[63379]: DEBUG oslo_concurrency.lockutils [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Lock "14adcb7b-b754-407e-9a99-28a1ca2ede68" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1889.237503] env[63379]: DEBUG oslo_concurrency.lockutils [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" 
{{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1889.250093] env[63379]: DEBUG nova.scheduler.client.report [None req-315c5d38-3397-48b7-bd75-842ec6e856c0 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1889.468699] env[63379]: DEBUG nova.compute.manager [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: b3f753e3-2ec6-4359-8de0-f9c771e274e5] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1889.660934] env[63379]: DEBUG nova.network.neutron [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1889.716031] env[63379]: DEBUG nova.compute.manager [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Starting instance... 
{{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1889.831446] env[63379]: DEBUG nova.network.neutron [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Updating instance_info_cache with network_info: [{"id": "4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191", "address": "fa:16:3e:c6:a7:ff", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b8a4c15-27", "ovs_interfaceid": "4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1889.987634] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1889.999995] env[63379]: DEBUG nova.compute.manager [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Start spawning the instance on the hypervisor. 
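The instance_info_cache update above carries the full network_info for port 4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191: one OVS VIF with a fixed IP on 192.168.128.0/28. A small sketch of reading the interesting fields out of a network_info list shaped like the logged one (the literal below is abridged from the log; the field names are taken directly from it):

# Abridged network_info entry copied from the log entry above.
network_info = [{
    'id': '4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191',
    'address': 'fa:16:3e:c6:a7:ff',
    'type': 'ovs',
    'devname': 'tap4b8a4c15-27',
    'details': {'segmentation_id': 794},
    'network': {
        'bridge': 'br-int',
        'subnets': [{
            'cidr': '192.168.128.0/28',
            'ips': [{'address': '192.168.128.3', 'type': 'fixed',
                     'floating_ips': []}],
        }],
    },
}]

for vif in network_info:
    fixed_ips = [ip['address']
                 for subnet in vif['network']['subnets']
                 for ip in subnet['ips'] if ip['type'] == 'fixed']
    print('%s (%s) on %s, segment %s, fixed IPs: %s'
          % (vif['devname'], vif['address'], vif['network']['bridge'],
             vif['details']['segmentation_id'], ', '.join(fixed_ips)))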
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1890.027875] env[63379]: DEBUG nova.virt.hardware [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1890.028156] env[63379]: DEBUG nova.virt.hardware [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1890.028325] env[63379]: DEBUG nova.virt.hardware [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1890.028509] env[63379]: DEBUG nova.virt.hardware [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1890.028663] env[63379]: DEBUG nova.virt.hardware [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1890.028888] env[63379]: DEBUG nova.virt.hardware [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1890.029040] env[63379]: DEBUG nova.virt.hardware [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1890.029223] env[63379]: DEBUG nova.virt.hardware [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1890.029397] env[63379]: DEBUG 
nova.virt.hardware [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1890.029566] env[63379]: DEBUG nova.virt.hardware [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1890.029748] env[63379]: DEBUG nova.virt.hardware [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1890.030720] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eb3ce5c-d58b-47cc-9102-797dfd17b9fe {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.038752] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28250a0d-1425-4304-9d82-085ccf1eda9e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.243918] env[63379]: DEBUG oslo_concurrency.lockutils [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1890.257844] env[63379]: DEBUG nova.compute.manager [req-9807ac3f-7042-42a3-8c34-24f540212090 req-6cb64d89-5da9-4d36-9697-af5bb77d3ece service nova] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Received event network-vif-plugged-3a859294-da1a-435c-aa5c-a1ec72c124c2 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1890.258111] env[63379]: DEBUG oslo_concurrency.lockutils [req-9807ac3f-7042-42a3-8c34-24f540212090 req-6cb64d89-5da9-4d36-9697-af5bb77d3ece service nova] Acquiring lock "1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1890.258384] env[63379]: DEBUG oslo_concurrency.lockutils [req-9807ac3f-7042-42a3-8c34-24f540212090 req-6cb64d89-5da9-4d36-9697-af5bb77d3ece service nova] Lock "1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1890.258514] env[63379]: DEBUG oslo_concurrency.lockutils [req-9807ac3f-7042-42a3-8c34-24f540212090 req-6cb64d89-5da9-4d36-9697-af5bb77d3ece service nova] Lock "1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1890.258688] 
env[63379]: DEBUG nova.compute.manager [req-9807ac3f-7042-42a3-8c34-24f540212090 req-6cb64d89-5da9-4d36-9697-af5bb77d3ece service nova] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] No waiting events found dispatching network-vif-plugged-3a859294-da1a-435c-aa5c-a1ec72c124c2 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1890.258858] env[63379]: WARNING nova.compute.manager [req-9807ac3f-7042-42a3-8c34-24f540212090 req-6cb64d89-5da9-4d36-9697-af5bb77d3ece service nova] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Received unexpected event network-vif-plugged-3a859294-da1a-435c-aa5c-a1ec72c124c2 for instance with vm_state building and task_state spawning. [ 1890.260509] env[63379]: DEBUG oslo_concurrency.lockutils [None req-315c5d38-3397-48b7-bd75-842ec6e856c0 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.279s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1890.263197] env[63379]: DEBUG oslo_concurrency.lockutils [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 4.660s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1890.263375] env[63379]: DEBUG nova.objects.instance [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63379) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1890.334434] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Releasing lock "refresh_cache-48c17c3b-1197-46cb-a0f7-3671b2d82c7e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1890.334840] env[63379]: DEBUG nova.compute.manager [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Instance network_info: |[{"id": "4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191", "address": "fa:16:3e:c6:a7:ff", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b8a4c15-27", "ovs_interfaceid": "4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1890.335221] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c6:a7:ff', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c7d2575f-b92f-44ec-a863-634cb76631a2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1890.346095] env[63379]: DEBUG oslo.service.loopingcall [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1890.346609] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1890.346996] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-10e86ef3-a18d-4970-a775-391111cb19e8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.369468] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1890.369468] env[63379]: value = "task-1780236" [ 1890.369468] env[63379]: _type = "Task" [ 1890.369468] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1890.379347] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780236, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.407306] env[63379]: DEBUG nova.network.neutron [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Successfully updated port: 3a859294-da1a-435c-aa5c-a1ec72c124c2 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1890.546272] env[63379]: DEBUG nova.compute.manager [req-e9796402-f0eb-491c-9947-2448431f020b req-ff9e2199-05f6-4115-93c6-5099445c5d49 service nova] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Received event network-changed-4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1890.547037] env[63379]: DEBUG nova.compute.manager [req-e9796402-f0eb-491c-9947-2448431f020b req-ff9e2199-05f6-4115-93c6-5099445c5d49 service nova] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Refreshing instance network info cache due to event network-changed-4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1890.547131] env[63379]: DEBUG oslo_concurrency.lockutils [req-e9796402-f0eb-491c-9947-2448431f020b req-ff9e2199-05f6-4115-93c6-5099445c5d49 service nova] Acquiring lock "refresh_cache-48c17c3b-1197-46cb-a0f7-3671b2d82c7e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1890.547245] env[63379]: DEBUG oslo_concurrency.lockutils [req-e9796402-f0eb-491c-9947-2448431f020b req-ff9e2199-05f6-4115-93c6-5099445c5d49 service nova] Acquired lock "refresh_cache-48c17c3b-1197-46cb-a0f7-3671b2d82c7e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1890.547414] env[63379]: DEBUG nova.network.neutron [req-e9796402-f0eb-491c-9947-2448431f020b req-ff9e2199-05f6-4115-93c6-5099445c5d49 service nova] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Refreshing network info cache for port 4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1890.816743] env[63379]: INFO nova.scheduler.client.report [None req-315c5d38-3397-48b7-bd75-842ec6e856c0 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Deleted allocation for migration 98be47e2-858e-45b1-8975-ef107c38cf34 [ 1890.878765] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780236, 'name': CreateVM_Task, 'duration_secs': 0.381031} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1890.879561] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1890.879819] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1890.880012] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1890.880337] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1890.880589] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-daa40da7-de6b-41dc-83d9-3d9e70e6e091 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.885016] env[63379]: DEBUG oslo_vmware.api [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 
tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1890.885016] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b34e98-e1a0-3ada-2d05-0dabc1d8fdff" [ 1890.885016] env[63379]: _type = "Task" [ 1890.885016] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1890.892788] env[63379]: DEBUG oslo_vmware.api [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b34e98-e1a0-3ada-2d05-0dabc1d8fdff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.910471] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquiring lock "refresh_cache-1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1890.910575] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquired lock "refresh_cache-1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1890.910709] env[63379]: DEBUG nova.network.neutron [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1891.231112] env[63379]: DEBUG nova.network.neutron [req-e9796402-f0eb-491c-9947-2448431f020b req-ff9e2199-05f6-4115-93c6-5099445c5d49 service nova] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Updated VIF entry in instance network info cache for port 4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1891.231516] env[63379]: DEBUG nova.network.neutron [req-e9796402-f0eb-491c-9947-2448431f020b req-ff9e2199-05f6-4115-93c6-5099445c5d49 service nova] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Updating instance_info_cache with network_info: [{"id": "4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191", "address": "fa:16:3e:c6:a7:ff", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b8a4c15-27", "ovs_interfaceid": "4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1891.273466] env[63379]: DEBUG oslo_concurrency.lockutils [None req-443791bc-f3ac-469c-aeca-b22806c15d23 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.010s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1891.274982] env[63379]: DEBUG oslo_concurrency.lockutils [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 2.037s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1891.322073] env[63379]: DEBUG oslo_concurrency.lockutils [None req-315c5d38-3397-48b7-bd75-842ec6e856c0 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "90f0c97d-695b-4975-8ab9-4e77a9175da1" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 9.747s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1891.394331] env[63379]: DEBUG oslo_vmware.api [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b34e98-e1a0-3ada-2d05-0dabc1d8fdff, 'name': SearchDatastore_Task, 'duration_secs': 0.010477} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1891.394663] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1891.394913] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1891.395174] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1891.395329] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1891.395524] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1891.395779] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-317d95c4-1a41-402c-be69-5ca7d6a83d32 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.403420] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1891.403595] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1891.404273] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3acaf9d8-b0c7-4644-83fb-7e72de28855e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.408801] env[63379]: DEBUG oslo_vmware.api [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1891.408801] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5274d4d6-0a64-eea0-5103-d48e3b609d7f" [ 1891.408801] env[63379]: _type = "Task" [ 1891.408801] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1891.417202] env[63379]: DEBUG oslo_vmware.api [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5274d4d6-0a64-eea0-5103-d48e3b609d7f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.444761] env[63379]: DEBUG nova.network.neutron [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1891.735602] env[63379]: DEBUG oslo_concurrency.lockutils [req-e9796402-f0eb-491c-9947-2448431f020b req-ff9e2199-05f6-4115-93c6-5099445c5d49 service nova] Releasing lock "refresh_cache-48c17c3b-1197-46cb-a0f7-3671b2d82c7e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1891.766933] env[63379]: DEBUG nova.network.neutron [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Updating instance_info_cache with network_info: [{"id": "3a859294-da1a-435c-aa5c-a1ec72c124c2", "address": "fa:16:3e:60:52:ad", "network": {"id": "2c6cbb4b-63db-4c84-91d3-63d6f68cfb71", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-740697972-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba1a1cf17f9941b299a6102689835f88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1195acd-707f-4bac-a99d-14db17a63802", "external-id": "nsx-vlan-transportzone-322", "segmentation_id": 322, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a859294-da", "ovs_interfaceid": "3a859294-da1a-435c-aa5c-a1ec72c124c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1891.779362] env[63379]: INFO nova.compute.claims [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1891.921348] env[63379]: DEBUG oslo_vmware.api [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5274d4d6-0a64-eea0-5103-d48e3b609d7f, 'name': SearchDatastore_Task, 'duration_secs': 0.007794} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1891.922178] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-868ff3f9-dd12-48e2-b601-86b1e7428d9b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.927801] env[63379]: DEBUG oslo_vmware.api [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1891.927801] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52cee71a-0999-3b09-879d-2be4a8df8d50" [ 1891.927801] env[63379]: _type = "Task" [ 1891.927801] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1891.935539] env[63379]: DEBUG oslo_vmware.api [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52cee71a-0999-3b09-879d-2be4a8df8d50, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1892.270207] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Releasing lock "refresh_cache-1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1892.270556] env[63379]: DEBUG nova.compute.manager [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Instance network_info: |[{"id": "3a859294-da1a-435c-aa5c-a1ec72c124c2", "address": "fa:16:3e:60:52:ad", "network": {"id": "2c6cbb4b-63db-4c84-91d3-63d6f68cfb71", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-740697972-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba1a1cf17f9941b299a6102689835f88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1195acd-707f-4bac-a99d-14db17a63802", "external-id": "nsx-vlan-transportzone-322", "segmentation_id": 322, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a859294-da", "ovs_interfaceid": "3a859294-da1a-435c-aa5c-a1ec72c124c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1892.270993] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:60:52:ad', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c1195acd-707f-4bac-a99d-14db17a63802', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3a859294-da1a-435c-aa5c-a1ec72c124c2', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1892.278637] env[63379]: DEBUG oslo.service.loopingcall [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1892.278845] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1892.279463] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5c9f63d8-f180-422a-8314-ad237cf83990 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.297833] env[63379]: INFO nova.compute.resource_tracker [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Updating resource usage from migration a9afaa25-c843-4661-9b08-5f138ce82641 [ 1892.301096] env[63379]: DEBUG nova.compute.manager [req-74b2cf76-2dac-4486-b096-b5e0b9ea349a req-d1aef3fb-8cce-4709-a979-25651485769f service nova] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Received event network-changed-3a859294-da1a-435c-aa5c-a1ec72c124c2 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1892.301287] env[63379]: DEBUG nova.compute.manager [req-74b2cf76-2dac-4486-b096-b5e0b9ea349a req-d1aef3fb-8cce-4709-a979-25651485769f service nova] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Refreshing instance network info cache due to event network-changed-3a859294-da1a-435c-aa5c-a1ec72c124c2. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1892.301494] env[63379]: DEBUG oslo_concurrency.lockutils [req-74b2cf76-2dac-4486-b096-b5e0b9ea349a req-d1aef3fb-8cce-4709-a979-25651485769f service nova] Acquiring lock "refresh_cache-1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1892.301645] env[63379]: DEBUG oslo_concurrency.lockutils [req-74b2cf76-2dac-4486-b096-b5e0b9ea349a req-d1aef3fb-8cce-4709-a979-25651485769f service nova] Acquired lock "refresh_cache-1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1892.301803] env[63379]: DEBUG nova.network.neutron [req-74b2cf76-2dac-4486-b096-b5e0b9ea349a req-d1aef3fb-8cce-4709-a979-25651485769f service nova] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Refreshing network info cache for port 3a859294-da1a-435c-aa5c-a1ec72c124c2 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1892.308551] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1892.308551] env[63379]: value = "task-1780237" [ 1892.308551] env[63379]: _type = "Task" [ 1892.308551] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1892.316908] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780237, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1892.425069] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3517fb6-03a4-428a-880c-0d56044b0df9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.434497] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15a36525-526f-44f3-8706-01b98007813f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.440973] env[63379]: DEBUG oslo_vmware.api [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52cee71a-0999-3b09-879d-2be4a8df8d50, 'name': SearchDatastore_Task, 'duration_secs': 0.009699} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1892.441592] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1892.441892] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 48c17c3b-1197-46cb-a0f7-3671b2d82c7e/48c17c3b-1197-46cb-a0f7-3671b2d82c7e.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1892.442148] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3e875284-905f-4c6e-831c-6b01d6fb215c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.469296] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-851d605e-06c2-413c-887f-48488f634477 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.472648] env[63379]: DEBUG oslo_vmware.api [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1892.472648] env[63379]: value = "task-1780238" [ 1892.472648] env[63379]: _type = "Task" [ 1892.472648] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1892.479396] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1e65d73-c203-41ce-b243-a8b0c639e61e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.486570] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b14460e3-4765-4738-b976-1b34e0ff611f tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "90f0c97d-695b-4975-8ab9-4e77a9175da1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.486868] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b14460e3-4765-4738-b976-1b34e0ff611f tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "90f0c97d-695b-4975-8ab9-4e77a9175da1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1892.487153] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b14460e3-4765-4738-b976-1b34e0ff611f tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "90f0c97d-695b-4975-8ab9-4e77a9175da1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.487417] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b14460e3-4765-4738-b976-1b34e0ff611f tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "90f0c97d-695b-4975-8ab9-4e77a9175da1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1892.487656] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b14460e3-4765-4738-b976-1b34e0ff611f tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "90f0c97d-695b-4975-8ab9-4e77a9175da1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1892.489337] env[63379]: DEBUG oslo_vmware.api [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780238, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1892.489833] env[63379]: INFO nova.compute.manager [None req-b14460e3-4765-4738-b976-1b34e0ff611f tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Terminating instance [ 1892.491850] env[63379]: DEBUG nova.compute.manager [None req-b14460e3-4765-4738-b976-1b34e0ff611f tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1892.492131] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b14460e3-4765-4738-b976-1b34e0ff611f tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1892.493014] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab5571c5-4655-481c-8a64-cd04334cf98b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.504738] env[63379]: DEBUG nova.compute.provider_tree [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1892.509731] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-b14460e3-4765-4738-b976-1b34e0ff611f tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1892.509956] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2e9cbe61-cc0b-437d-9093-4766ae3ee00b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.515400] env[63379]: DEBUG oslo_vmware.api [None req-b14460e3-4765-4738-b976-1b34e0ff611f tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1892.515400] env[63379]: value = "task-1780239" [ 1892.515400] env[63379]: _type = "Task" [ 1892.515400] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1892.523018] env[63379]: DEBUG oslo_vmware.api [None req-b14460e3-4765-4738-b976-1b34e0ff611f tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780239, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1892.818822] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780237, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1892.986685] env[63379]: DEBUG oslo_vmware.api [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780238, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1893.009159] env[63379]: DEBUG nova.scheduler.client.report [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1893.026057] env[63379]: DEBUG oslo_vmware.api [None req-b14460e3-4765-4738-b976-1b34e0ff611f tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780239, 'name': PowerOffVM_Task, 'duration_secs': 0.204024} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1893.026360] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-b14460e3-4765-4738-b976-1b34e0ff611f tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1893.026616] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b14460e3-4765-4738-b976-1b34e0ff611f tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1893.026812] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ade9f23d-7f6f-4726-99d1-4cd3730b4ef2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.175498] env[63379]: DEBUG nova.network.neutron [req-74b2cf76-2dac-4486-b096-b5e0b9ea349a req-d1aef3fb-8cce-4709-a979-25651485769f service nova] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Updated VIF entry in instance network info cache for port 3a859294-da1a-435c-aa5c-a1ec72c124c2. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1893.175983] env[63379]: DEBUG nova.network.neutron [req-74b2cf76-2dac-4486-b096-b5e0b9ea349a req-d1aef3fb-8cce-4709-a979-25651485769f service nova] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Updating instance_info_cache with network_info: [{"id": "3a859294-da1a-435c-aa5c-a1ec72c124c2", "address": "fa:16:3e:60:52:ad", "network": {"id": "2c6cbb4b-63db-4c84-91d3-63d6f68cfb71", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-740697972-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba1a1cf17f9941b299a6102689835f88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1195acd-707f-4bac-a99d-14db17a63802", "external-id": "nsx-vlan-transportzone-322", "segmentation_id": 322, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a859294-da", "ovs_interfaceid": "3a859294-da1a-435c-aa5c-a1ec72c124c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1893.319097] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780237, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1893.483352] env[63379]: DEBUG oslo_vmware.api [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780238, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.642202} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1893.483579] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 48c17c3b-1197-46cb-a0f7-3671b2d82c7e/48c17c3b-1197-46cb-a0f7-3671b2d82c7e.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1893.483800] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1893.484068] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-42b5edb2-a9fb-463b-b766-0f2c024e51ab {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.490197] env[63379]: DEBUG oslo_vmware.api [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1893.490197] env[63379]: value = "task-1780241" [ 1893.490197] env[63379]: _type = "Task" [ 1893.490197] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1893.497278] env[63379]: DEBUG oslo_vmware.api [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780241, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1893.521229] env[63379]: DEBUG oslo_concurrency.lockutils [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.247s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1893.521437] env[63379]: INFO nova.compute.manager [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Migrating [ 1893.527724] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.540s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1893.529145] env[63379]: INFO nova.compute.claims [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: b3f753e3-2ec6-4359-8de0-f9c771e274e5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1893.678540] env[63379]: DEBUG oslo_concurrency.lockutils [req-74b2cf76-2dac-4486-b096-b5e0b9ea349a req-d1aef3fb-8cce-4709-a979-25651485769f service nova] Releasing lock "refresh_cache-1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1893.819440] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780237, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1893.963794] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1893.964131] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1893.964291] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63379) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10593}} [ 1893.964458] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager.update_available_resource {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1893.999196] env[63379]: DEBUG oslo_vmware.api [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780241, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064501} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1893.999498] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1894.000303] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f17459e2-cd8f-4f9d-bf66-0d4adaa13b59 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.022094] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] 48c17c3b-1197-46cb-a0f7-3671b2d82c7e/48c17c3b-1197-46cb-a0f7-3671b2d82c7e.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1894.022314] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cf4f0d7c-4caa-45de-a943-f12df76d5474 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.041616] env[63379]: DEBUG oslo_concurrency.lockutils [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "refresh_cache-4b419aa8-d4da-45fd-a6da-6f05ee851f2f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1894.041787] env[63379]: DEBUG oslo_concurrency.lockutils [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquired lock "refresh_cache-4b419aa8-d4da-45fd-a6da-6f05ee851f2f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1894.041962] env[63379]: DEBUG nova.network.neutron [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1894.044432] env[63379]: DEBUG oslo_vmware.api [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1894.044432] env[63379]: value = "task-1780242" [ 1894.044432] env[63379]: _type = "Task" [ 1894.044432] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1894.054524] env[63379]: DEBUG oslo_vmware.api [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780242, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1894.320691] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780237, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1894.467516] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1894.559908] env[63379]: DEBUG oslo_vmware.api [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780242, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1894.670117] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5b799af-1e04-41e9-9ce3-15e33433aa4f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.677870] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52ced54e-1719-468f-9e33-84743d5fc234 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.710701] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a217a15-0c81-403c-8667-a92c1bb8cc59 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.718026] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-949126d4-3058-460a-9181-7aa66b9eb0e3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.731132] env[63379]: DEBUG nova.compute.provider_tree [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1894.777076] env[63379]: DEBUG nova.network.neutron [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Updating instance_info_cache with network_info: [{"id": "d4e8381c-6eb1-4ebe-a6a3-b89ee2eb423e", "address": "fa:16:3e:f4:8d:6f", "network": {"id": "0dd98be0-5b25-4e45-ac38-4b8d3cd9fc6c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-191573180-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "645f0e0a5e1a44d59ca9c85da49bb454", "mtu": 8950, "physical_network": "default", "tunneled": 
false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd4e8381c-6e", "ovs_interfaceid": "d4e8381c-6eb1-4ebe-a6a3-b89ee2eb423e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1894.820757] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780237, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.059491] env[63379]: DEBUG oslo_vmware.api [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780242, 'name': ReconfigVM_Task, 'duration_secs': 0.755968} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1895.059782] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Reconfigured VM instance instance-00000066 to attach disk [datastore1] 48c17c3b-1197-46cb-a0f7-3671b2d82c7e/48c17c3b-1197-46cb-a0f7-3671b2d82c7e.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1895.060422] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b2da2039-6840-4a3d-af8d-92c96fa12cab {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.067512] env[63379]: DEBUG oslo_vmware.api [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1895.067512] env[63379]: value = "task-1780243" [ 1895.067512] env[63379]: _type = "Task" [ 1895.067512] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1895.074891] env[63379]: DEBUG oslo_vmware.api [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780243, 'name': Rename_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.234365] env[63379]: DEBUG nova.scheduler.client.report [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1895.279908] env[63379]: DEBUG oslo_concurrency.lockutils [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Releasing lock "refresh_cache-4b419aa8-d4da-45fd-a6da-6f05ee851f2f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1895.321117] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780237, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.577769] env[63379]: DEBUG oslo_vmware.api [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780243, 'name': Rename_Task, 'duration_secs': 0.140185} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1895.578011] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1895.578260] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d1b46d02-ec09-4d8b-9caf-e14286593010 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.583666] env[63379]: DEBUG oslo_vmware.api [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1895.583666] env[63379]: value = "task-1780244" [ 1895.583666] env[63379]: _type = "Task" [ 1895.583666] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1895.590731] env[63379]: DEBUG oslo_vmware.api [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780244, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.739424] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.211s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1895.740128] env[63379]: DEBUG nova.compute.manager [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: b3f753e3-2ec6-4359-8de0-f9c771e274e5] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1895.742790] env[63379]: DEBUG oslo_concurrency.lockutils [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.499s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1895.744232] env[63379]: INFO nova.compute.claims [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1895.823490] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780237, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1896.096073] env[63379]: DEBUG oslo_vmware.api [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780244, 'name': PowerOnVM_Task, 'duration_secs': 0.410991} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1896.096378] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1896.096707] env[63379]: INFO nova.compute.manager [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Took 8.32 seconds to spawn the instance on the hypervisor. 
[ 1896.096967] env[63379]: DEBUG nova.compute.manager [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1896.097902] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72bced15-e1d6-4548-bf65-47a733846e60 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.249213] env[63379]: DEBUG nova.compute.utils [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1896.252874] env[63379]: DEBUG nova.compute.manager [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: b3f753e3-2ec6-4359-8de0-f9c771e274e5] Not allocating networking since 'none' was specified. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1896.323391] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780237, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1896.615494] env[63379]: INFO nova.compute.manager [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Took 13.07 seconds to build instance. [ 1896.757022] env[63379]: DEBUG nova.compute.manager [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: b3f753e3-2ec6-4359-8de0-f9c771e274e5] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1896.798097] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dd2c283-f75f-4606-a959-b1ab4e9aa7b0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.815724] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Updating instance '4b419aa8-d4da-45fd-a6da-6f05ee851f2f' progress to 0 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1896.829633] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780237, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1896.881074] env[63379]: DEBUG nova.compute.manager [req-99908c07-2375-4315-b94c-e088dc169947 req-6801c6d3-2119-4f2e-835c-6c402cce42b7 service nova] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Received event network-changed-4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1896.881074] env[63379]: DEBUG nova.compute.manager [req-99908c07-2375-4315-b94c-e088dc169947 req-6801c6d3-2119-4f2e-835c-6c402cce42b7 service nova] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Refreshing instance network info cache due to event network-changed-4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1896.881201] env[63379]: DEBUG oslo_concurrency.lockutils [req-99908c07-2375-4315-b94c-e088dc169947 req-6801c6d3-2119-4f2e-835c-6c402cce42b7 service nova] Acquiring lock "refresh_cache-48c17c3b-1197-46cb-a0f7-3671b2d82c7e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1896.882102] env[63379]: DEBUG oslo_concurrency.lockutils [req-99908c07-2375-4315-b94c-e088dc169947 req-6801c6d3-2119-4f2e-835c-6c402cce42b7 service nova] Acquired lock "refresh_cache-48c17c3b-1197-46cb-a0f7-3671b2d82c7e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1896.882355] env[63379]: DEBUG nova.network.neutron [req-99908c07-2375-4315-b94c-e088dc169947 req-6801c6d3-2119-4f2e-835c-6c402cce42b7 service nova] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Refreshing network info cache for port 4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1896.921480] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40cc2e20-8e09-4186-be37-a9f454096a29 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.929607] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d151aaa-d86d-4344-8d53-f12a7fada5e2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.962828] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb4fe628-6e14-4102-9aab-5d32b49e86d8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.970447] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc208737-710c-497c-bc9b-68e16173f931 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.983902] env[63379]: DEBUG nova.compute.provider_tree [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1897.117453] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4454d20b-d4fc-4ff3-89ce-0af5c6db78cc tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock 
"48c17c3b-1197-46cb-a0f7-3671b2d82c7e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.583s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1897.321075] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1897.321364] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c79ad0bb-1b8a-412f-90c4-0b2c8b2f49ed {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.328821] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780237, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.329964] env[63379]: DEBUG oslo_vmware.api [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 1897.329964] env[63379]: value = "task-1780245" [ 1897.329964] env[63379]: _type = "Task" [ 1897.329964] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1897.339119] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] VM already powered off {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1897.339227] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Updating instance '4b419aa8-d4da-45fd-a6da-6f05ee851f2f' progress to 17 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1897.488031] env[63379]: DEBUG nova.scheduler.client.report [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1897.686448] env[63379]: DEBUG nova.network.neutron [req-99908c07-2375-4315-b94c-e088dc169947 req-6801c6d3-2119-4f2e-835c-6c402cce42b7 service nova] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Updated VIF entry in instance network info cache for port 4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1897.686908] env[63379]: DEBUG nova.network.neutron [req-99908c07-2375-4315-b94c-e088dc169947 req-6801c6d3-2119-4f2e-835c-6c402cce42b7 service nova] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Updating instance_info_cache with network_info: [{"id": "4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191", "address": "fa:16:3e:c6:a7:ff", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.211", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b8a4c15-27", "ovs_interfaceid": "4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1897.759825] env[63379]: DEBUG oslo_concurrency.lockutils [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "d4988643-18ff-44c8-8363-e0de43da2abe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1897.760118] env[63379]: DEBUG oslo_concurrency.lockutils [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "d4988643-18ff-44c8-8363-e0de43da2abe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1897.765754] env[63379]: DEBUG nova.compute.manager [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: b3f753e3-2ec6-4359-8de0-f9c771e274e5] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1897.790151] env[63379]: DEBUG nova.virt.hardware [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1897.790413] env[63379]: DEBUG nova.virt.hardware [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1897.790576] env[63379]: DEBUG nova.virt.hardware [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1897.790769] env[63379]: DEBUG nova.virt.hardware [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1897.790919] env[63379]: DEBUG nova.virt.hardware [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1897.791081] env[63379]: DEBUG nova.virt.hardware [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1897.791294] env[63379]: DEBUG nova.virt.hardware [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1897.791452] env[63379]: DEBUG nova.virt.hardware [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1897.791619] env[63379]: DEBUG nova.virt.hardware [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 
tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1897.791784] env[63379]: DEBUG nova.virt.hardware [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1897.791957] env[63379]: DEBUG nova.virt.hardware [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1897.792820] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24c896cd-fe84-4f04-97fc-07d507c2ae04 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.801540] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ca3de8d-473a-49f0-9807-0e6417d6a6d3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.815372] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: b3f753e3-2ec6-4359-8de0-f9c771e274e5] Instance VIF info [] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1897.821140] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Creating folder: Project (744ecfdbb1fc40fca2fb00d2fba6a592). Parent ref: group-v369214. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1897.821568] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-73633a81-87f0-4924-8eae-0eb67eba60e8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.831552] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780237, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.834159] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Created folder: Project (744ecfdbb1fc40fca2fb00d2fba6a592) in parent group-v369214. [ 1897.834356] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Creating folder: Instances. Parent ref: group-v369494. 
{{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1897.834581] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-99bdce55-ee12-4279-91c1-79c6e8cdca3c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.843245] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Created folder: Instances in parent group-v369494. [ 1897.843483] env[63379]: DEBUG oslo.service.loopingcall [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1897.843663] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b3f753e3-2ec6-4359-8de0-f9c771e274e5] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1897.843859] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dbf6ea14-909d-4cdf-8fd7-002dada2a9af {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.857141] env[63379]: DEBUG nova.virt.hardware [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1897.857350] env[63379]: DEBUG nova.virt.hardware [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1897.857540] env[63379]: DEBUG nova.virt.hardware [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1897.857730] env[63379]: DEBUG nova.virt.hardware [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1897.857885] env[63379]: DEBUG nova.virt.hardware [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1897.858045] env[63379]: DEBUG nova.virt.hardware [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1897.858252] env[63379]: DEBUG nova.virt.hardware [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1897.858416] env[63379]: DEBUG nova.virt.hardware [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1897.858611] env[63379]: DEBUG nova.virt.hardware [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1897.858791] env[63379]: DEBUG nova.virt.hardware [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1897.858970] env[63379]: DEBUG nova.virt.hardware [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1897.864290] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8c61e8ef-cf1f-470f-97ff-725e22fbba50 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.879101] env[63379]: DEBUG oslo_vmware.api [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 1897.879101] env[63379]: value = "task-1780249" [ 1897.879101] env[63379]: _type = "Task" [ 1897.879101] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1897.880223] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1897.880223] env[63379]: value = "task-1780248" [ 1897.880223] env[63379]: _type = "Task" [ 1897.880223] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1897.890323] env[63379]: DEBUG oslo_vmware.api [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780249, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.893089] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780248, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.995073] env[63379]: DEBUG oslo_concurrency.lockutils [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.252s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1897.995652] env[63379]: DEBUG nova.compute.manager [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1897.998386] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 3.531s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1897.998606] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1897.998774] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63379) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1897.999877] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bcd3a61-07f1-440a-bcf4-994f21ea2250 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.007857] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b317bd1-63a2-47ea-b8fd-c4cb2296b699 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.021419] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbd701fe-0e24-4bda-92ee-361711dc061a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.028455] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43fc6674-f034-4c77-96bb-8ef4e227f75a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.059929] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179761MB free_disk=164GB free_vcpus=48 pci_devices=None {{(pid=63379) _report_hypervisor_resource_view 
/opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1898.060119] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1898.060387] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1898.190508] env[63379]: DEBUG oslo_concurrency.lockutils [req-99908c07-2375-4315-b94c-e088dc169947 req-6801c6d3-2119-4f2e-835c-6c402cce42b7 service nova] Releasing lock "refresh_cache-48c17c3b-1197-46cb-a0f7-3671b2d82c7e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1898.262768] env[63379]: DEBUG nova.compute.manager [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1898.331534] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780237, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.392194] env[63379]: DEBUG oslo_vmware.api [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780249, 'name': ReconfigVM_Task, 'duration_secs': 0.181143} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1898.395214] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Updating instance '4b419aa8-d4da-45fd-a6da-6f05ee851f2f' progress to 33 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1898.398397] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780248, 'name': CreateVM_Task, 'duration_secs': 0.298155} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1898.398561] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b3f753e3-2ec6-4359-8de0-f9c771e274e5] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1898.399159] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1898.399325] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1898.399637] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1898.399878] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1356440-fdfa-4c00-942f-80c9d2ef5ffe {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.403977] env[63379]: DEBUG oslo_vmware.api [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Waiting for the task: (returnval){ [ 1898.403977] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]525be929-e8b3-2256-76a2-5ac4ff3fd33d" [ 1898.403977] env[63379]: _type = "Task" [ 1898.403977] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1898.411181] env[63379]: DEBUG oslo_vmware.api [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]525be929-e8b3-2256-76a2-5ac4ff3fd33d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.500598] env[63379]: DEBUG nova.compute.utils [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1898.502585] env[63379]: DEBUG nova.compute.manager [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Not allocating networking since 'none' was specified. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1898.785924] env[63379]: DEBUG oslo_concurrency.lockutils [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1898.832464] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780237, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.901433] env[63379]: DEBUG nova.virt.hardware [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1898.901688] env[63379]: DEBUG nova.virt.hardware [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1898.901880] env[63379]: DEBUG nova.virt.hardware [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1898.902139] env[63379]: DEBUG nova.virt.hardware [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1898.902459] env[63379]: DEBUG nova.virt.hardware [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1898.902663] env[63379]: DEBUG nova.virt.hardware [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1898.902892] env[63379]: DEBUG nova.virt.hardware [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1898.903073] env[63379]: DEBUG nova.virt.hardware [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1898.903252] env[63379]: DEBUG nova.virt.hardware [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1898.903419] env[63379]: DEBUG nova.virt.hardware [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1898.903598] env[63379]: DEBUG nova.virt.hardware [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1898.909170] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Reconfiguring VM instance instance-00000058 to detach disk 2000 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1898.909513] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a7966b1e-ff43-4c8d-8953-7de620d49acb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.931472] env[63379]: DEBUG oslo_vmware.api [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]525be929-e8b3-2256-76a2-5ac4ff3fd33d, 'name': SearchDatastore_Task, 'duration_secs': 0.009747} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1898.932671] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1898.932921] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: b3f753e3-2ec6-4359-8de0-f9c771e274e5] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1898.933174] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1898.933326] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1898.933508] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1898.933815] env[63379]: DEBUG oslo_vmware.api [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 1898.933815] env[63379]: value = "task-1780250" [ 1898.933815] env[63379]: _type = "Task" [ 1898.933815] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1898.934012] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-690a8ccf-0b27-4299-b94d-5131a6c565c4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.943405] env[63379]: DEBUG oslo_vmware.api [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780250, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.944342] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1898.944515] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1898.945218] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17f708ba-0b19-42a8-b278-77c92c9f4978 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.949883] env[63379]: DEBUG oslo_vmware.api [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Waiting for the task: (returnval){ [ 1898.949883] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]523ffb24-67bf-df5d-29db-6c8417a7d914" [ 1898.949883] env[63379]: _type = "Task" [ 1898.949883] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1898.958569] env[63379]: DEBUG oslo_vmware.api [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]523ffb24-67bf-df5d-29db-6c8417a7d914, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.003504] env[63379]: DEBUG nova.compute.manager [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1899.070336] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Applying migration context for instance 4b419aa8-d4da-45fd-a6da-6f05ee851f2f as it has an incoming, in-progress migration a9afaa25-c843-4661-9b08-5f138ce82641. Migration status is migrating {{(pid=63379) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1899.071618] env[63379]: INFO nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Updating resource usage from migration a9afaa25-c843-4661-9b08-5f138ce82641 [ 1899.088709] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 510db409-0b4c-494a-8084-39ef3cd6c918 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1899.088875] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 2be6bdea-416e-4912-8930-3c4e4f194f99 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1899.089009] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance d3c05ba6-b565-4432-b815-14ae0933853e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1899.089132] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 90f0c97d-695b-4975-8ab9-4e77a9175da1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1899.089250] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 48c17c3b-1197-46cb-a0f7-3671b2d82c7e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1899.089364] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1899.089479] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Migration a9afaa25-c843-4661-9b08-5f138ce82641 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1899.089667] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 4b419aa8-d4da-45fd-a6da-6f05ee851f2f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1899.089765] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance b3f753e3-2ec6-4359-8de0-f9c771e274e5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1899.089903] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 14adcb7b-b754-407e-9a99-28a1ca2ede68 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1899.332684] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780237, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.367220] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b14460e3-4765-4738-b976-1b34e0ff611f tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1899.367448] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b14460e3-4765-4738-b976-1b34e0ff611f tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1899.367736] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-b14460e3-4765-4738-b976-1b34e0ff611f tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Deleting the datastore file [datastore1] 90f0c97d-695b-4975-8ab9-4e77a9175da1 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1899.367991] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-74fe25db-9ee8-4e34-9eeb-2389690755c6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.374507] env[63379]: DEBUG oslo_vmware.api [None req-b14460e3-4765-4738-b976-1b34e0ff611f tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1899.374507] env[63379]: value = "task-1780251" [ 1899.374507] env[63379]: _type = "Task" [ 1899.374507] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1899.382385] env[63379]: DEBUG oslo_vmware.api [None req-b14460e3-4765-4738-b976-1b34e0ff611f tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780251, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.445162] env[63379]: DEBUG oslo_vmware.api [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780250, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.458837] env[63379]: DEBUG oslo_vmware.api [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]523ffb24-67bf-df5d-29db-6c8417a7d914, 'name': SearchDatastore_Task, 'duration_secs': 0.008566} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1899.459611] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60957a59-3ba3-45eb-8ad2-5ccd3d2e6c84 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.464392] env[63379]: DEBUG oslo_vmware.api [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Waiting for the task: (returnval){ [ 1899.464392] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52ae89ff-9b85-e6b0-8c07-336fa87c316d" [ 1899.464392] env[63379]: _type = "Task" [ 1899.464392] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1899.471701] env[63379]: DEBUG oslo_vmware.api [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52ae89ff-9b85-e6b0-8c07-336fa87c316d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.593055] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance d4988643-18ff-44c8-8363-e0de43da2abe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1899.593055] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1899.593055] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2560MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1899.731065] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41f797ff-0324-4207-9b4b-297a9d4256d7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.738829] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97d2f826-f360-4e87-8605-87dc0443eccb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.770465] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4221ec09-8822-4819-bbd3-39941d524914 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.778272] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88c3b3fd-c9cc-4b41-82a7-5a37342b55f5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.793803] env[63379]: DEBUG nova.compute.provider_tree [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1899.833394] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780237, 'name': CreateVM_Task, 'duration_secs': 7.306432} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1899.833563] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1899.834254] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1899.834425] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1899.834746] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1899.834992] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-455472d0-050f-40e1-87c0-c42ac6c925a7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.839443] env[63379]: DEBUG oslo_vmware.api [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 1899.839443] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f0ce3a-d508-4c9e-ad66-b42f4ea6b7ff" [ 1899.839443] env[63379]: _type = "Task" [ 1899.839443] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1899.846689] env[63379]: DEBUG oslo_vmware.api [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f0ce3a-d508-4c9e-ad66-b42f4ea6b7ff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.883574] env[63379]: DEBUG oslo_vmware.api [None req-b14460e3-4765-4738-b976-1b34e0ff611f tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780251, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167557} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1899.883859] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-b14460e3-4765-4738-b976-1b34e0ff611f tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1899.884028] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b14460e3-4765-4738-b976-1b34e0ff611f tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1899.884281] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b14460e3-4765-4738-b976-1b34e0ff611f tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1899.884468] env[63379]: INFO nova.compute.manager [None req-b14460e3-4765-4738-b976-1b34e0ff611f tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Took 7.39 seconds to destroy the instance on the hypervisor. [ 1899.884745] env[63379]: DEBUG oslo.service.loopingcall [None req-b14460e3-4765-4738-b976-1b34e0ff611f tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1899.884954] env[63379]: DEBUG nova.compute.manager [-] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1899.885062] env[63379]: DEBUG nova.network.neutron [-] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1899.944553] env[63379]: DEBUG oslo_vmware.api [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780250, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.973882] env[63379]: DEBUG oslo_vmware.api [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52ae89ff-9b85-e6b0-8c07-336fa87c316d, 'name': SearchDatastore_Task, 'duration_secs': 0.01213} completed successfully.
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1899.974229] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1899.974489] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] b3f753e3-2ec6-4359-8de0-f9c771e274e5/b3f753e3-2ec6-4359-8de0-f9c771e274e5.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1899.974745] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4f5ddfcd-fdc3-4fc7-a893-cafb9f3a5f15 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.980724] env[63379]: DEBUG oslo_vmware.api [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Waiting for the task: (returnval){ [ 1899.980724] env[63379]: value = "task-1780252" [ 1899.980724] env[63379]: _type = "Task" [ 1899.980724] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1899.988449] env[63379]: DEBUG oslo_vmware.api [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780252, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1900.012806] env[63379]: DEBUG nova.compute.manager [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1900.037800] env[63379]: DEBUG nova.virt.hardware [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1900.038118] env[63379]: DEBUG nova.virt.hardware [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1900.038323] env[63379]: DEBUG nova.virt.hardware [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1900.038536] env[63379]: DEBUG nova.virt.hardware [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1900.038752] env[63379]: DEBUG nova.virt.hardware [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1900.038920] env[63379]: DEBUG nova.virt.hardware [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1900.039240] env[63379]: DEBUG nova.virt.hardware [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1900.039451] env[63379]: DEBUG nova.virt.hardware [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1900.039658] env[63379]: DEBUG nova.virt.hardware [None req-55fd8c94-e505-465e-b522-be990d52004a 
tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1900.039864] env[63379]: DEBUG nova.virt.hardware [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1900.040079] env[63379]: DEBUG nova.virt.hardware [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1900.040935] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11f4fac7-9be7-4b28-8bd9-960754166793 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.049863] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d5d04d8-6f9e-4c69-ad11-c86c17293ac2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.064113] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Instance VIF info [] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1900.069951] env[63379]: DEBUG oslo.service.loopingcall [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1900.070559] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1900.070810] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-09628719-a75b-40b5-b244-1ecb3fa96729 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.087644] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1900.087644] env[63379]: value = "task-1780253" [ 1900.087644] env[63379]: _type = "Task" [ 1900.087644] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1900.095393] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780253, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1900.300256] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1900.350812] env[63379]: DEBUG oslo_vmware.api [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f0ce3a-d508-4c9e-ad66-b42f4ea6b7ff, 'name': SearchDatastore_Task, 'duration_secs': 0.008782} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1900.351169] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1900.351401] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1900.351649] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1900.351807] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1900.351992] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1900.352288] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-70b96339-0bd9-4f1c-89a2-51053787c78d {{(pid=63379) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.370028] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1900.370268] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1900.371076] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22646e1f-3a1b-43a3-b571-6221860ed772 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.377660] env[63379]: DEBUG oslo_vmware.api [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 1900.377660] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]524df3ba-b1c0-9b64-4bc8-7b0230c5ad4f" [ 1900.377660] env[63379]: _type = "Task" [ 1900.377660] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1900.386115] env[63379]: DEBUG oslo_vmware.api [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]524df3ba-b1c0-9b64-4bc8-7b0230c5ad4f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1900.417092] env[63379]: DEBUG nova.compute.manager [req-08a0cc1a-58b1-4806-9444-1b5d2a0d581a req-2d61d115-397c-449c-a31b-fea370d9c3f3 service nova] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Received event network-vif-deleted-ef820562-0de4-462d-a51d-13e4a4929719 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1900.417344] env[63379]: INFO nova.compute.manager [req-08a0cc1a-58b1-4806-9444-1b5d2a0d581a req-2d61d115-397c-449c-a31b-fea370d9c3f3 service nova] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Neutron deleted interface ef820562-0de4-462d-a51d-13e4a4929719; detaching it from the instance and deleting it from the info cache [ 1900.417572] env[63379]: DEBUG nova.network.neutron [req-08a0cc1a-58b1-4806-9444-1b5d2a0d581a req-2d61d115-397c-449c-a31b-fea370d9c3f3 service nova] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1900.446180] env[63379]: DEBUG oslo_vmware.api [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780250, 'name': ReconfigVM_Task, 'duration_secs': 1.33942} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1900.446478] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Reconfigured VM instance instance-00000058 to detach disk 2000 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1900.447289] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46627e49-38cf-474e-b525-b4fa90f00cb4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.469947] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] 4b419aa8-d4da-45fd-a6da-6f05ee851f2f/4b419aa8-d4da-45fd-a6da-6f05ee851f2f.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1900.470707] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b86d3289-af03-4677-8472-3e6639fb8f95 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.492798] env[63379]: DEBUG oslo_vmware.api [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780252, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1900.494188] env[63379]: DEBUG oslo_vmware.api [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 1900.494188] env[63379]: value = "task-1780254" [ 1900.494188] env[63379]: _type = "Task" [ 1900.494188] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1900.503656] env[63379]: DEBUG oslo_vmware.api [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780254, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1900.598478] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780253, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1900.808231] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63379) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1900.808517] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.748s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1900.808735] env[63379]: DEBUG oslo_concurrency.lockutils [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.023s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1900.810409] env[63379]: INFO nova.compute.claims [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1900.888760] env[63379]: DEBUG nova.network.neutron [-] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1900.890024] env[63379]: DEBUG oslo_vmware.api [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]524df3ba-b1c0-9b64-4bc8-7b0230c5ad4f, 'name': SearchDatastore_Task, 'duration_secs': 0.144574} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1900.891932] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04a585d6-faf6-48f7-a56f-5358d971e10d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.895628] env[63379]: DEBUG oslo_concurrency.lockutils [None req-60643bc3-f627-45be-84c7-94de6d5a0cba tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Acquiring lock "d3c05ba6-b565-4432-b815-14ae0933853e" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1900.895852] env[63379]: DEBUG oslo_concurrency.lockutils [None req-60643bc3-f627-45be-84c7-94de6d5a0cba tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lock "d3c05ba6-b565-4432-b815-14ae0933853e" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1900.900280] env[63379]: DEBUG oslo_vmware.api [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 1900.900280] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a9b4c4-3b71-b56c-36d3-d28f4b92d360" [ 1900.900280] env[63379]: _type = "Task" [ 1900.900280] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1900.909484] env[63379]: DEBUG oslo_vmware.api [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a9b4c4-3b71-b56c-36d3-d28f4b92d360, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1900.919806] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6021e800-ec3d-475b-8300-e24409ce0703 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.928237] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aaa3708-2fbe-4ebd-b271-ecd73542ce88 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.958987] env[63379]: DEBUG nova.compute.manager [req-08a0cc1a-58b1-4806-9444-1b5d2a0d581a req-2d61d115-397c-449c-a31b-fea370d9c3f3 service nova] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Detach interface failed, port_id=ef820562-0de4-462d-a51d-13e4a4929719, reason: Instance 90f0c97d-695b-4975-8ab9-4e77a9175da1 could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 1900.993078] env[63379]: DEBUG oslo_vmware.api [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780252, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.662016} completed successfully.
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1900.993369] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] b3f753e3-2ec6-4359-8de0-f9c771e274e5/b3f753e3-2ec6-4359-8de0-f9c771e274e5.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1900.993621] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: b3f753e3-2ec6-4359-8de0-f9c771e274e5] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1900.993842] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a93fa17e-9774-44a7-8866-0093502df6ea {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.004919] env[63379]: DEBUG oslo_vmware.api [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780254, 'name': ReconfigVM_Task, 'duration_secs': 0.277498} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1901.006328] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Reconfigured VM instance instance-00000058 to attach disk [datastore1] 4b419aa8-d4da-45fd-a6da-6f05ee851f2f/4b419aa8-d4da-45fd-a6da-6f05ee851f2f.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1901.006630] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Updating instance '4b419aa8-d4da-45fd-a6da-6f05ee851f2f' progress to 50 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1901.010921] env[63379]: DEBUG oslo_vmware.api [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Waiting for the task: (returnval){ [ 1901.010921] env[63379]: value = "task-1780255" [ 1901.010921] env[63379]: _type = "Task" [ 1901.010921] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.020753] env[63379]: DEBUG oslo_vmware.api [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780255, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.099962] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780253, 'name': CreateVM_Task, 'duration_secs': 0.711307} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1901.099962] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1901.099962] env[63379]: DEBUG oslo_concurrency.lockutils [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1901.100185] env[63379]: DEBUG oslo_concurrency.lockutils [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1901.100441] env[63379]: DEBUG oslo_concurrency.lockutils [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1901.100701] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ab9069d-b647-4261-a8ce-a152d536a4cc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.105322] env[63379]: DEBUG oslo_vmware.api [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Waiting for the task: (returnval){ [ 1901.105322] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]526d136a-e142-368a-c711-23bc3cf6fe57" [ 1901.105322] env[63379]: _type = "Task" [ 1901.105322] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.113153] env[63379]: DEBUG oslo_vmware.api [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]526d136a-e142-368a-c711-23bc3cf6fe57, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.392643] env[63379]: INFO nova.compute.manager [-] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Took 1.51 seconds to deallocate network for instance. 
[ 1901.399588] env[63379]: DEBUG nova.compute.utils [None req-60643bc3-f627-45be-84c7-94de6d5a0cba tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1901.410033] env[63379]: DEBUG oslo_vmware.api [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a9b4c4-3b71-b56c-36d3-d28f4b92d360, 'name': SearchDatastore_Task, 'duration_secs': 0.018875} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1901.410139] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1901.410370] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6/1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1901.410643] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6cea4ed6-5e11-4ca4-b71c-cc702f747709 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.418182] env[63379]: DEBUG oslo_vmware.api [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 1901.418182] env[63379]: value = "task-1780256" [ 1901.418182] env[63379]: _type = "Task" [ 1901.418182] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.426535] env[63379]: DEBUG oslo_vmware.api [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780256, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.521012] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e98f9ee-5d12-44d0-9105-d383bbac40ed {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.523710] env[63379]: DEBUG oslo_vmware.api [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780255, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066847} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1901.524023] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: b3f753e3-2ec6-4359-8de0-f9c771e274e5] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1901.525106] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79646647-9600-4c77-a148-bf39b1cdb0b3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.542657] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b05f3aef-3064-4c62-9266-e0a93f7d5349 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.559561] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: b3f753e3-2ec6-4359-8de0-f9c771e274e5] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] b3f753e3-2ec6-4359-8de0-f9c771e274e5/b3f753e3-2ec6-4359-8de0-f9c771e274e5.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1901.560190] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bdefe050-e424-4e27-80e4-e3ba1df48449 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.587740] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Updating instance '4b419aa8-d4da-45fd-a6da-6f05ee851f2f' progress to 67 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1901.596982] env[63379]: DEBUG oslo_vmware.api [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Waiting for the task: (returnval){ [ 1901.596982] env[63379]: value = "task-1780257" [ 1901.596982] env[63379]: _type = "Task" [ 1901.596982] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.605770] env[63379]: DEBUG oslo_vmware.api [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780257, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.614911] env[63379]: DEBUG oslo_vmware.api [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]526d136a-e142-368a-c711-23bc3cf6fe57, 'name': SearchDatastore_Task, 'duration_secs': 0.009423} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1901.615244] env[63379]: DEBUG oslo_concurrency.lockutils [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1901.615501] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1901.615761] env[63379]: DEBUG oslo_concurrency.lockutils [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1901.615916] env[63379]: DEBUG oslo_concurrency.lockutils [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1901.616138] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1901.616421] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-52f33a69-9ed7-4af5-a023-ffea87a5556a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.634918] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1901.635131] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1901.635931] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7fb50fda-d822-4aa0-8b2f-d2269e2c884f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.641976] env[63379]: DEBUG oslo_vmware.api [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Waiting for the task: (returnval){ [ 1901.641976] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52dd2297-a7d2-9fef-59cd-0c669170f998" [ 1901.641976] env[63379]: _type = "Task" [ 1901.641976] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.651441] env[63379]: DEBUG oslo_vmware.api [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52dd2297-a7d2-9fef-59cd-0c669170f998, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.898508] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b14460e3-4765-4738-b976-1b34e0ff611f tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1901.901483] env[63379]: DEBUG oslo_concurrency.lockutils [None req-60643bc3-f627-45be-84c7-94de6d5a0cba tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lock "d3c05ba6-b565-4432-b815-14ae0933853e" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.005s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1901.929209] env[63379]: DEBUG oslo_vmware.api [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780256, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.481468} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1901.929500] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6/1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1901.929767] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1901.930061] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4c464f51-c4aa-43e2-89ba-56e69e29d9d7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.937552] env[63379]: DEBUG oslo_vmware.api [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 1901.937552] env[63379]: value = "task-1780258" [ 1901.937552] env[63379]: _type = "Task" [ 1901.937552] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.946590] env[63379]: DEBUG oslo_vmware.api [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780258, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.970034] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b0b20e6-5c7a-4f3e-ad8e-4d1878ccb9e0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.977875] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cbf06fe-d26d-4e30-8740-8e70e18c6e2f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.009167] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-879999d1-aa52-45ef-a3d9-f2da0e4b7417 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.016864] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e27a2208-9155-4a8b-8a4f-29b562a66b08 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.030474] env[63379]: DEBUG nova.compute.provider_tree [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1902.107352] env[63379]: DEBUG oslo_vmware.api [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780257, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.139512] env[63379]: DEBUG nova.network.neutron [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Port d4e8381c-6eb1-4ebe-a6a3-b89ee2eb423e binding to destination host cpu-1 is already ACTIVE {{(pid=63379) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1902.152980] env[63379]: DEBUG oslo_vmware.api [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52dd2297-a7d2-9fef-59cd-0c669170f998, 'name': SearchDatastore_Task, 'duration_secs': 0.061042} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1902.154128] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-426a6fdb-1e2a-4e18-a764-95d7af2a2d89 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.159988] env[63379]: DEBUG oslo_vmware.api [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Waiting for the task: (returnval){ [ 1902.159988] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52bce207-0205-4c82-4832-1dc3b8703404" [ 1902.159988] env[63379]: _type = "Task" [ 1902.159988] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1902.167503] env[63379]: DEBUG oslo_vmware.api [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52bce207-0205-4c82-4832-1dc3b8703404, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.447603] env[63379]: DEBUG oslo_vmware.api [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780258, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.181846} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1902.447933] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1902.448753] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfa8fd22-e62e-45b6-b945-ea62b7a5458c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.469977] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6/1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1902.471229] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1680ae31-9327-4aac-aa9b-7230fbf99bd0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.492666] env[63379]: DEBUG oslo_vmware.api [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 1902.492666] env[63379]: value = "task-1780259" [ 1902.492666] env[63379]: _type = "Task" [ 1902.492666] env[63379]: } to complete. 
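Each "Waiting for the task ... to complete" / "progress is N%" pair in these entries comes from a poll loop over a vCenter task handle. The loop below is a simplified, self-contained version of that shape; the Task class, states, and timings are stand-ins, not the oslo.vmware API.

    import time

    class Task:
        """Toy stand-in for a vCenter task handle (not the real vSphere object)."""
        def __init__(self, name, states):
            self.name = name
            self._states = iter(states)      # e.g. [6, 99, "success"]

        def poll(self):
            return next(self._states)

    def wait_for_task(task, interval=0.5, timeout=60):
        """Poll until the task reports success; raise on error or timeout."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state = task.poll()
            if state == "success":
                return
            if state == "error":
                raise RuntimeError("%s failed" % task.name)
            print("Task %s progress is %s%%" % (task.name, state))  # mirrors the log lines
            time.sleep(interval)
        raise TimeoutError("%s did not finish within %ss" % (task.name, timeout))

    wait_for_task(Task("ReconfigVM_Task", [6, 99, "success"]), interval=0.01)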
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1902.501099] env[63379]: DEBUG oslo_vmware.api [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780259, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.552360] env[63379]: ERROR nova.scheduler.client.report [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [req-c1eae015-7c8a-4de0-bbe5-62ccbc5db0c0] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID cf478c89-515f-4372-b90f-4868ab56e978. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c1eae015-7c8a-4de0-bbe5-62ccbc5db0c0"}]} [ 1902.569138] env[63379]: DEBUG nova.scheduler.client.report [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Refreshing inventories for resource provider cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1902.583911] env[63379]: DEBUG nova.scheduler.client.report [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Updating ProviderTree inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1902.584153] env[63379]: DEBUG nova.compute.provider_tree [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1902.594485] env[63379]: DEBUG nova.scheduler.client.report [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 
tempest-AttachInterfacesTestJSON-924600923-project-member] Refreshing aggregate associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, aggregates: None {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1902.607815] env[63379]: DEBUG oslo_vmware.api [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780257, 'name': ReconfigVM_Task, 'duration_secs': 0.512151} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1902.608154] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: b3f753e3-2ec6-4359-8de0-f9c771e274e5] Reconfigured VM instance instance-00000068 to attach disk [datastore1] b3f753e3-2ec6-4359-8de0-f9c771e274e5/b3f753e3-2ec6-4359-8de0-f9c771e274e5.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1902.608767] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-79a66de4-494d-47de-ba0e-7250adc9fb0b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.611595] env[63379]: DEBUG nova.scheduler.client.report [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Refreshing trait associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1902.614690] env[63379]: DEBUG oslo_vmware.api [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Waiting for the task: (returnval){ [ 1902.614690] env[63379]: value = "task-1780260" [ 1902.614690] env[63379]: _type = "Task" [ 1902.614690] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1902.623952] env[63379]: DEBUG oslo_vmware.api [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780260, 'name': Rename_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.673808] env[63379]: DEBUG oslo_vmware.api [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52bce207-0205-4c82-4832-1dc3b8703404, 'name': SearchDatastore_Task, 'duration_secs': 0.008987} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1902.674151] env[63379]: DEBUG oslo_concurrency.lockutils [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1902.674423] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 14adcb7b-b754-407e-9a99-28a1ca2ede68/14adcb7b-b754-407e-9a99-28a1ca2ede68.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1902.674704] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8d8d74ed-5d40-499c-87db-5529290e3bbe {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.681249] env[63379]: DEBUG oslo_vmware.api [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Waiting for the task: (returnval){ [ 1902.681249] env[63379]: value = "task-1780261" [ 1902.681249] env[63379]: _type = "Task" [ 1902.681249] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1902.692499] env[63379]: DEBUG oslo_vmware.api [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780261, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.753939] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da662865-da33-49d0-91c9-93dca6e7d6d4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.763014] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdf83abe-b40a-41b1-a89e-8520816753bd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.794994] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a0213a0-93d1-4ead-b211-a888cc1bd8fd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.802348] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f22fe59-dede-4c95-bc19-794adc8194f1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.816272] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1902.816716] env[63379]: DEBUG nova.compute.provider_tree [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1902.818143] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1902.818223] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Starting heal instance info cache {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9974}} [ 1902.988791] env[63379]: DEBUG oslo_concurrency.lockutils [None req-60643bc3-f627-45be-84c7-94de6d5a0cba tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Acquiring lock "d3c05ba6-b565-4432-b815-14ae0933853e" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1902.989112] env[63379]: DEBUG oslo_concurrency.lockutils [None req-60643bc3-f627-45be-84c7-94de6d5a0cba tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lock "d3c05ba6-b565-4432-b815-14ae0933853e" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s 
{{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1902.989370] env[63379]: INFO nova.compute.manager [None req-60643bc3-f627-45be-84c7-94de6d5a0cba tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Attaching volume 5c8655ca-742a-4de6-850a-911164a51f15 to /dev/sdb [ 1903.005452] env[63379]: DEBUG oslo_vmware.api [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780259, 'name': ReconfigVM_Task, 'duration_secs': 0.277984} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1903.005715] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Reconfigured VM instance instance-00000067 to attach disk [datastore1] 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6/1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1903.006515] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6495b767-3eb3-4af6-ae13-f8c6e5949798 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.013374] env[63379]: DEBUG oslo_vmware.api [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 1903.013374] env[63379]: value = "task-1780262" [ 1903.013374] env[63379]: _type = "Task" [ 1903.013374] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1903.021902] env[63379]: DEBUG oslo_vmware.api [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780262, 'name': Rename_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1903.024130] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6e2ed98-335a-4f42-b6f0-064d995a68d1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.031020] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bde1941-6f34-4411-8d80-833a495f8f79 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.044819] env[63379]: DEBUG nova.virt.block_device [None req-60643bc3-f627-45be-84c7-94de6d5a0cba tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Updating existing volume attachment record: 0e62bcb4-545d-44ef-9777-8ce19b8e9c62 {{(pid=63379) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1903.124322] env[63379]: DEBUG oslo_vmware.api [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780260, 'name': Rename_Task, 'duration_secs': 0.145044} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1903.124651] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: b3f753e3-2ec6-4359-8de0-f9c771e274e5] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1903.124961] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8c95d0e2-5c1c-4bf4-bd45-bf74c72add21 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.132690] env[63379]: DEBUG oslo_vmware.api [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Waiting for the task: (returnval){ [ 1903.132690] env[63379]: value = "task-1780263" [ 1903.132690] env[63379]: _type = "Task" [ 1903.132690] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1903.141210] env[63379]: DEBUG oslo_vmware.api [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780263, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1903.162550] env[63379]: DEBUG oslo_concurrency.lockutils [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "4b419aa8-d4da-45fd-a6da-6f05ee851f2f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1903.162550] env[63379]: DEBUG oslo_concurrency.lockutils [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "4b419aa8-d4da-45fd-a6da-6f05ee851f2f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1903.162856] env[63379]: DEBUG oslo_concurrency.lockutils [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "4b419aa8-d4da-45fd-a6da-6f05ee851f2f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1903.192931] env[63379]: DEBUG oslo_vmware.api [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780261, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1903.353070] env[63379]: DEBUG nova.scheduler.client.report [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Updated inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 with generation 145 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1903.353426] env[63379]: DEBUG nova.compute.provider_tree [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Updating resource provider cf478c89-515f-4372-b90f-4868ab56e978 generation from 145 to 146 during operation: update_inventory {{(pid=63379) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1903.353654] env[63379]: DEBUG nova.compute.provider_tree [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 
512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1903.523723] env[63379]: DEBUG oslo_vmware.api [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780262, 'name': Rename_Task, 'duration_secs': 0.154205} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1903.524035] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1903.524302] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-097f8a97-390f-4982-9a15-82772e9f6694 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.531085] env[63379]: DEBUG oslo_vmware.api [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 1903.531085] env[63379]: value = "task-1780265" [ 1903.531085] env[63379]: _type = "Task" [ 1903.531085] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1903.540363] env[63379]: DEBUG oslo_vmware.api [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780265, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1903.643288] env[63379]: DEBUG oslo_vmware.api [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780263, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1903.695903] env[63379]: DEBUG oslo_vmware.api [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780261, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.580289} completed successfully. 
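The 409 placement.concurrent_update error a little earlier, the inventory refresh that follows it, and the "generation from 145 to 146" update above are one round of Placement's optimistic-concurrency protocol: each inventory write names the provider generation the client last saw, and a conflict simply means refresh and retry. Below is a schematic retry loop under that reading; get_provider and put_inventory stand in for the REST calls and are not the report-client API.

    class Conflict(Exception):
        """Stands in for a 409 placement.concurrent_update response."""

    def set_inventory(get_provider, put_inventory, uuid, inventory, max_attempts=4):
        """Write inventory, refreshing the provider generation after each conflict."""
        for _ in range(max_attempts):
            generation = get_provider(uuid)["generation"]      # refresh our view
            try:
                put_inventory(uuid, inventory, generation=generation)
                return True
            except Conflict:
                continue                                       # another writer won; retry
        return False

    # Tiny in-memory stand-in for the server: a successful write bumps the
    # provider generation, and a write based on a stale generation is rejected.
    _provider = {"generation": 145}
    def get_provider(uuid): return dict(_provider)
    def put_inventory(uuid, inventory, generation):
        if generation != _provider["generation"]:
            raise Conflict()
        _provider["generation"] += 1

    set_inventory(get_provider, put_inventory,
                  "cf478c89-515f-4372-b90f-4868ab56e978", {"VCPU": {"total": 48}})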
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1903.696340] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 14adcb7b-b754-407e-9a99-28a1ca2ede68/14adcb7b-b754-407e-9a99-28a1ca2ede68.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1903.696656] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1903.697037] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ffa3889e-6d6e-47c7-92d2-6e677fa7c190 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.705820] env[63379]: DEBUG oslo_vmware.api [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Waiting for the task: (returnval){ [ 1903.705820] env[63379]: value = "task-1780266" [ 1903.705820] env[63379]: _type = "Task" [ 1903.705820] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1903.715368] env[63379]: DEBUG oslo_vmware.api [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780266, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1903.859508] env[63379]: DEBUG oslo_concurrency.lockutils [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.051s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1903.860073] env[63379]: DEBUG nova.compute.manager [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1903.863172] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b14460e3-4765-4738-b976-1b34e0ff611f tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.965s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1903.863389] env[63379]: DEBUG nova.objects.instance [None req-b14460e3-4765-4738-b976-1b34e0ff611f tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lazy-loading 'resources' on Instance uuid 90f0c97d-695b-4975-8ab9-4e77a9175da1 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1904.041302] env[63379]: DEBUG oslo_vmware.api [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780265, 'name': PowerOnVM_Task, 'duration_secs': 0.497293} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1904.041509] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1904.041720] env[63379]: INFO nova.compute.manager [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Took 14.04 seconds to spawn the instance on the hypervisor. [ 1904.041908] env[63379]: DEBUG nova.compute.manager [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1904.042706] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eecc4c0-a393-487c-ab99-33a7632fc594 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.143815] env[63379]: DEBUG oslo_vmware.api [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780263, 'name': PowerOnVM_Task, 'duration_secs': 0.638036} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1904.144223] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: b3f753e3-2ec6-4359-8de0-f9c771e274e5] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1904.144496] env[63379]: INFO nova.compute.manager [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: b3f753e3-2ec6-4359-8de0-f9c771e274e5] Took 6.38 seconds to spawn the instance on the hypervisor. [ 1904.144713] env[63379]: DEBUG nova.compute.manager [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: b3f753e3-2ec6-4359-8de0-f9c771e274e5] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1904.145597] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf402128-56c5-4e10-945f-24fa6d2113e4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.207159] env[63379]: DEBUG oslo_concurrency.lockutils [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "refresh_cache-4b419aa8-d4da-45fd-a6da-6f05ee851f2f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1904.207159] env[63379]: DEBUG oslo_concurrency.lockutils [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquired lock "refresh_cache-4b419aa8-d4da-45fd-a6da-6f05ee851f2f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1904.207330] env[63379]: DEBUG nova.network.neutron [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1904.219858] env[63379]: DEBUG oslo_vmware.api [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780266, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074018} completed successfully. 
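Across the instances being built here, these entries repeat the same disk-preparation order: copy the cached VMDK, extend it to the flavor's root size, reconfigure the VM to attach it, rename, then power on. Since the ordering is the essential part, a sketch of it is just a pipeline of task-producing steps; the callables below are placeholders rather than the vmops code.

    def run_steps(steps, wait):
        """Start each step in order and wait for its task before moving on."""
        for name, start in steps:
            task = start()          # returns a task-like handle
            wait(task)              # e.g. the wait_for_task loop sketched earlier
            print("%s completed" % name)

    def prepare_and_boot(copy_disk, extend_disk, attach_disk, rename, power_on, wait):
        run_steps([("CopyVirtualDisk_Task", copy_disk),
                   ("ExtendVirtualDisk_Task", extend_disk),
                   ("ReconfigVM_Task", attach_disk),
                   ("Rename_Task", rename),
                   ("PowerOnVM_Task", power_on)], wait)

    # Demo with no-op steps: each "start" returns a dummy handle, wait does nothing.
    prepare_and_boot(*[lambda: object()] * 5, wait=lambda task: None)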
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1904.219858] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1904.219858] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6648f670-0b8a-4479-8912-96452116acce {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.240134] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] 14adcb7b-b754-407e-9a99-28a1ca2ede68/14adcb7b-b754-407e-9a99-28a1ca2ede68.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1904.240710] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4ed0cdec-82b8-475b-87fc-a6605a499704 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.261212] env[63379]: DEBUG oslo_vmware.api [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Waiting for the task: (returnval){ [ 1904.261212] env[63379]: value = "task-1780267" [ 1904.261212] env[63379]: _type = "Task" [ 1904.261212] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1904.269403] env[63379]: DEBUG oslo_vmware.api [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780267, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1904.367168] env[63379]: DEBUG nova.compute.utils [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1904.373161] env[63379]: DEBUG nova.compute.manager [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1904.373360] env[63379]: DEBUG nova.network.neutron [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1904.459590] env[63379]: DEBUG nova.policy [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5cbf26808a73470898829b58491e7c6f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'edb0d4b37a67492f9e0275b341e80cc2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1904.513380] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d366e6ad-17ab-4667-8fd0-6c8e8f78d296 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.522066] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ccc8043-eca5-411a-9c70-5f27e832f9c4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.552059] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-667c4dad-bd1a-48f1-a859-b8f5f44a2631 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.564317] env[63379]: INFO nova.compute.manager [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Took 19.89 seconds to build instance. [ 1904.568474] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deb3e090-e5c7-45ed-8879-11c4427a216d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.582522] env[63379]: DEBUG nova.compute.provider_tree [None req-b14460e3-4765-4738-b976-1b34e0ff611f tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1904.661995] env[63379]: INFO nova.compute.manager [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: b3f753e3-2ec6-4359-8de0-f9c771e274e5] Took 14.69 seconds to build instance. [ 1904.771093] env[63379]: DEBUG oslo_vmware.api [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780267, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1904.876219] env[63379]: DEBUG nova.compute.manager [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1904.974564] env[63379]: DEBUG nova.network.neutron [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Updating instance_info_cache with network_info: [{"id": "d4e8381c-6eb1-4ebe-a6a3-b89ee2eb423e", "address": "fa:16:3e:f4:8d:6f", "network": {"id": "0dd98be0-5b25-4e45-ac38-4b8d3cd9fc6c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-191573180-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "645f0e0a5e1a44d59ca9c85da49bb454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd4e8381c-6e", "ovs_interfaceid": "d4e8381c-6eb1-4ebe-a6a3-b89ee2eb423e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1904.978488] env[63379]: DEBUG nova.network.neutron [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Successfully created port: 41bdc6f8-c059-49a5-86a4-a7a03cfe0300 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1904.985462] env[63379]: DEBUG nova.compute.manager [req-ed05fe9d-58a1-4f9e-9cfb-abb14e3e2e33 req-2eb4dbf1-cf3c-41c0-97e9-5e6387a68a66 service nova] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Received event network-changed-3a859294-da1a-435c-aa5c-a1ec72c124c2 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1904.985462] env[63379]: DEBUG nova.compute.manager [req-ed05fe9d-58a1-4f9e-9cfb-abb14e3e2e33 req-2eb4dbf1-cf3c-41c0-97e9-5e6387a68a66 service nova] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Refreshing instance network info cache due to event network-changed-3a859294-da1a-435c-aa5c-a1ec72c124c2. 
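The "Received event network-changed-..." entries show how the instance network info cache stays current from the Neutron side: an external event names the port, and the handler re-reads that port while holding the instance's refresh_cache lock. The sketch below is a compact illustration of that event-to-refresh flow; the class, handler, and query function are invented for this example.

    import threading

    class InstanceNetworkCache:
        """Toy per-instance network-info cache refreshed on Neutron events."""
        def __init__(self, query_port):
            self._query_port = query_port    # stand-in for a Neutron port lookup
            self._lock = threading.Lock()    # plays the role of the refresh_cache-<uuid> lock
            self.ports = {}

        def handle_event(self, event):
            # event example: {"name": "network-changed", "port_id": "3a859294-..."}
            if event["name"] != "network-changed":
                return
            with self._lock:                 # acquire, refresh, release
                self.ports[event["port_id"]] = self._query_port(event["port_id"])

    cache = InstanceNetworkCache(query_port=lambda port_id: {"id": port_id, "active": True})
    cache.handle_event({"name": "network-changed",
                        "port_id": "3a859294-da1a-435c-aa5c-a1ec72c124c2"})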
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1904.985462] env[63379]: DEBUG oslo_concurrency.lockutils [req-ed05fe9d-58a1-4f9e-9cfb-abb14e3e2e33 req-2eb4dbf1-cf3c-41c0-97e9-5e6387a68a66 service nova] Acquiring lock "refresh_cache-1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1904.985462] env[63379]: DEBUG oslo_concurrency.lockutils [req-ed05fe9d-58a1-4f9e-9cfb-abb14e3e2e33 req-2eb4dbf1-cf3c-41c0-97e9-5e6387a68a66 service nova] Acquired lock "refresh_cache-1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1904.985462] env[63379]: DEBUG nova.network.neutron [req-ed05fe9d-58a1-4f9e-9cfb-abb14e3e2e33 req-2eb4dbf1-cf3c-41c0-97e9-5e6387a68a66 service nova] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Refreshing network info cache for port 3a859294-da1a-435c-aa5c-a1ec72c124c2 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1905.068891] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2806b4c0-4fa7-41ab-b186-af0f194034bd tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.404s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1905.086407] env[63379]: DEBUG nova.scheduler.client.report [None req-b14460e3-4765-4738-b976-1b34e0ff611f tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1905.163882] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7e0344bd-0180-47e4-82d3-d70338e44b13 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Lock "b3f753e3-2ec6-4359-8de0-f9c771e274e5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.197s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1905.271091] env[63379]: DEBUG oslo_vmware.api [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780267, 'name': ReconfigVM_Task, 'duration_secs': 0.864644} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1905.271538] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Reconfigured VM instance instance-00000069 to attach disk [datastore1] 14adcb7b-b754-407e-9a99-28a1ca2ede68/14adcb7b-b754-407e-9a99-28a1ca2ede68.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1905.272239] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e3021df9-bf78-407f-8f70-4e32a26a4261 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.279862] env[63379]: DEBUG oslo_vmware.api [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Waiting for the task: (returnval){ [ 1905.279862] env[63379]: value = "task-1780269" [ 1905.279862] env[63379]: _type = "Task" [ 1905.279862] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1905.287784] env[63379]: DEBUG oslo_vmware.api [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780269, 'name': Rename_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1905.478899] env[63379]: DEBUG oslo_concurrency.lockutils [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Releasing lock "refresh_cache-4b419aa8-d4da-45fd-a6da-6f05ee851f2f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1905.592358] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b14460e3-4765-4738-b976-1b34e0ff611f tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.728s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1905.617805] env[63379]: INFO nova.scheduler.client.report [None req-b14460e3-4765-4738-b976-1b34e0ff611f tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Deleted allocations for instance 90f0c97d-695b-4975-8ab9-4e77a9175da1 [ 1905.753186] env[63379]: DEBUG nova.network.neutron [req-ed05fe9d-58a1-4f9e-9cfb-abb14e3e2e33 req-2eb4dbf1-cf3c-41c0-97e9-5e6387a68a66 service nova] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Updated VIF entry in instance network info cache for port 3a859294-da1a-435c-aa5c-a1ec72c124c2. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1905.753681] env[63379]: DEBUG nova.network.neutron [req-ed05fe9d-58a1-4f9e-9cfb-abb14e3e2e33 req-2eb4dbf1-cf3c-41c0-97e9-5e6387a68a66 service nova] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Updating instance_info_cache with network_info: [{"id": "3a859294-da1a-435c-aa5c-a1ec72c124c2", "address": "fa:16:3e:60:52:ad", "network": {"id": "2c6cbb4b-63db-4c84-91d3-63d6f68cfb71", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-740697972-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.174", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba1a1cf17f9941b299a6102689835f88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1195acd-707f-4bac-a99d-14db17a63802", "external-id": "nsx-vlan-transportzone-322", "segmentation_id": 322, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a859294-da", "ovs_interfaceid": "3a859294-da1a-435c-aa5c-a1ec72c124c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1905.790633] env[63379]: DEBUG oslo_vmware.api [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780269, 'name': Rename_Task, 'duration_secs': 0.135279} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1905.790920] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1905.791183] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-499eb8c3-9753-46b1-976f-fed3e8cb1a1e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.798305] env[63379]: DEBUG oslo_vmware.api [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Waiting for the task: (returnval){ [ 1905.798305] env[63379]: value = "task-1780270" [ 1905.798305] env[63379]: _type = "Task" [ 1905.798305] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1905.807180] env[63379]: DEBUG oslo_vmware.api [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780270, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1905.838359] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "refresh_cache-4b419aa8-d4da-45fd-a6da-6f05ee851f2f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1905.838520] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquired lock "refresh_cache-4b419aa8-d4da-45fd-a6da-6f05ee851f2f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1905.838615] env[63379]: DEBUG nova.network.neutron [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Forcefully refreshing network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1905.887891] env[63379]: DEBUG nova.compute.manager [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1905.914296] env[63379]: DEBUG nova.virt.hardware [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1905.914545] env[63379]: DEBUG nova.virt.hardware [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1905.914712] env[63379]: DEBUG nova.virt.hardware [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1905.914894] env[63379]: DEBUG nova.virt.hardware [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1905.915061] env[63379]: DEBUG nova.virt.hardware [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 
tempest-AttachInterfacesTestJSON-924600923-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1905.915221] env[63379]: DEBUG nova.virt.hardware [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1905.915429] env[63379]: DEBUG nova.virt.hardware [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1905.915600] env[63379]: DEBUG nova.virt.hardware [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1905.915766] env[63379]: DEBUG nova.virt.hardware [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1905.915932] env[63379]: DEBUG nova.virt.hardware [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1905.916126] env[63379]: DEBUG nova.virt.hardware [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1905.917023] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cc43194-0207-4244-8327-b5cd44193f58 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.924912] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81989596-b64a-4c88-915a-e026b603a7bd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.999794] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9db7d78-0249-419e-b3da-b272ea23cb3b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.018709] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53d30129-73df-4587-9560-88b6499699d8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.025252] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-951ec899-46fb-4a61-a948-597a505540e1 
tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Updating instance '4b419aa8-d4da-45fd-a6da-6f05ee851f2f' progress to 83 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1906.126438] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b14460e3-4765-4738-b976-1b34e0ff611f tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "90f0c97d-695b-4975-8ab9-4e77a9175da1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.639s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1906.256825] env[63379]: DEBUG oslo_concurrency.lockutils [req-ed05fe9d-58a1-4f9e-9cfb-abb14e3e2e33 req-2eb4dbf1-cf3c-41c0-97e9-5e6387a68a66 service nova] Releasing lock "refresh_cache-1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1906.308585] env[63379]: DEBUG oslo_vmware.api [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780270, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1906.532719] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-951ec899-46fb-4a61-a948-597a505540e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Updating instance '4b419aa8-d4da-45fd-a6da-6f05ee851f2f' progress to 100 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1906.539266] env[63379]: DEBUG nova.network.neutron [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Successfully updated port: 41bdc6f8-c059-49a5-86a4-a7a03cfe0300 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1906.809651] env[63379]: DEBUG oslo_vmware.api [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780270, 'name': PowerOnVM_Task, 'duration_secs': 0.78026} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1906.810026] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1906.810181] env[63379]: INFO nova.compute.manager [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Took 6.80 seconds to spawn the instance on the hypervisor. 
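The ReconfigVM_Task / Rename_Task / PowerOnVM_Task entries above follow oslo.vmware's invoke-then-poll pattern: a SOAP method returning a vCenter Task is invoked, then the task is polled (the "progress is N%" lines) until it finishes. The sketch below is illustrative only and is not Nova's own code; the vCenter host, credentials and vm_ref are placeholders, and only public oslo.vmware calls (VMwareAPISession, invoke_api, wait_for_task) are used.

    from oslo_vmware import api as vmware_api

    # Placeholder session setup; a real deployment reads these values from nova.conf.
    session = vmware_api.VMwareAPISession(
        'vc1.example.test',   # placeholder vCenter host
        'svc-user',           # placeholder username
        'svc-password',       # placeholder password
        api_retry_count=3,
        task_poll_interval=0.5)

    def power_on(vm_ref):
        # invoke_api() issues the SOAP call and returns a Task managed object,
        # corresponding to the "Invoking VirtualMachine.PowerOnVM_Task" lines above.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task() polls the task (the "progress is N%" lines) until it
        # reaches a terminal state and raises if vCenter reports an error.
        return session.wait_for_task(task)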
[ 1906.810368] env[63379]: DEBUG nova.compute.manager [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1906.811376] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13273b30-f33a-4a45-8e6d-40c3506473ef {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.009942] env[63379]: DEBUG nova.compute.manager [req-9502a1ed-9272-4c69-99d2-edc262fcfa5d req-da528f2d-d32c-4260-a899-4c199019e580 service nova] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Received event network-vif-plugged-41bdc6f8-c059-49a5-86a4-a7a03cfe0300 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1907.010182] env[63379]: DEBUG oslo_concurrency.lockutils [req-9502a1ed-9272-4c69-99d2-edc262fcfa5d req-da528f2d-d32c-4260-a899-4c199019e580 service nova] Acquiring lock "d4988643-18ff-44c8-8363-e0de43da2abe-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1907.010395] env[63379]: DEBUG oslo_concurrency.lockutils [req-9502a1ed-9272-4c69-99d2-edc262fcfa5d req-da528f2d-d32c-4260-a899-4c199019e580 service nova] Lock "d4988643-18ff-44c8-8363-e0de43da2abe-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1907.010630] env[63379]: DEBUG oslo_concurrency.lockutils [req-9502a1ed-9272-4c69-99d2-edc262fcfa5d req-da528f2d-d32c-4260-a899-4c199019e580 service nova] Lock "d4988643-18ff-44c8-8363-e0de43da2abe-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1907.010747] env[63379]: DEBUG nova.compute.manager [req-9502a1ed-9272-4c69-99d2-edc262fcfa5d req-da528f2d-d32c-4260-a899-4c199019e580 service nova] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] No waiting events found dispatching network-vif-plugged-41bdc6f8-c059-49a5-86a4-a7a03cfe0300 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1907.010915] env[63379]: WARNING nova.compute.manager [req-9502a1ed-9272-4c69-99d2-edc262fcfa5d req-da528f2d-d32c-4260-a899-4c199019e580 service nova] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Received unexpected event network-vif-plugged-41bdc6f8-c059-49a5-86a4-a7a03cfe0300 for instance with vm_state building and task_state spawning. 
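The "Acquiring lock ...", "acquired by ... :: waited" and "released by ... :: held" entries around the event handling above are emitted by oslo.concurrency's lockutils, which Nova uses both as a decorator and as a context manager. A minimal sketch, assuming placeholder lock names in place of the real instance UUIDs (the function below is a stand-in, not Nova's actual event handler):

    from oslo_concurrency import lockutils

    @lockutils.synchronized('example-instance-uuid-events')  # placeholder lock name
    def pop_instance_event():
        # While this body runs, lockutils logs the "acquired by ... :: waited" and
        # "released by ... :: held" lines with the measured wait/hold durations.
        return None

    # Equivalent explicit form with the context manager, which produces the
    # "Acquiring lock" / "Acquired lock" / "Releasing lock" trio seen for the
    # refresh_cache-<uuid> locks above:
    with lockutils.lock('refresh_cache-example-instance-uuid'):  # placeholder name
        pass  # refresh the network info cache while the lock is held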
[ 1907.011095] env[63379]: DEBUG nova.compute.manager [req-9502a1ed-9272-4c69-99d2-edc262fcfa5d req-da528f2d-d32c-4260-a899-4c199019e580 service nova] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Received event network-changed-41bdc6f8-c059-49a5-86a4-a7a03cfe0300 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1907.011258] env[63379]: DEBUG nova.compute.manager [req-9502a1ed-9272-4c69-99d2-edc262fcfa5d req-da528f2d-d32c-4260-a899-4c199019e580 service nova] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Refreshing instance network info cache due to event network-changed-41bdc6f8-c059-49a5-86a4-a7a03cfe0300. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1907.011442] env[63379]: DEBUG oslo_concurrency.lockutils [req-9502a1ed-9272-4c69-99d2-edc262fcfa5d req-da528f2d-d32c-4260-a899-4c199019e580 service nova] Acquiring lock "refresh_cache-d4988643-18ff-44c8-8363-e0de43da2abe" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1907.011582] env[63379]: DEBUG oslo_concurrency.lockutils [req-9502a1ed-9272-4c69-99d2-edc262fcfa5d req-da528f2d-d32c-4260-a899-4c199019e580 service nova] Acquired lock "refresh_cache-d4988643-18ff-44c8-8363-e0de43da2abe" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1907.011742] env[63379]: DEBUG nova.network.neutron [req-9502a1ed-9272-4c69-99d2-edc262fcfa5d req-da528f2d-d32c-4260-a899-4c199019e580 service nova] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Refreshing network info cache for port 41bdc6f8-c059-49a5-86a4-a7a03cfe0300 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1907.046773] env[63379]: DEBUG oslo_concurrency.lockutils [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "refresh_cache-d4988643-18ff-44c8-8363-e0de43da2abe" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1907.084872] env[63379]: DEBUG nova.network.neutron [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Updating instance_info_cache with network_info: [{"id": "d4e8381c-6eb1-4ebe-a6a3-b89ee2eb423e", "address": "fa:16:3e:f4:8d:6f", "network": {"id": "0dd98be0-5b25-4e45-ac38-4b8d3cd9fc6c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-191573180-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "645f0e0a5e1a44d59ca9c85da49bb454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd4e8381c-6e", "ovs_interfaceid": "d4e8381c-6eb1-4ebe-a6a3-b89ee2eb423e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1907.328017] env[63379]: INFO nova.compute.manager [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Took 17.11 seconds to build instance. [ 1907.391040] env[63379]: DEBUG oslo_concurrency.lockutils [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "1c983c16-6f86-4932-9698-7fb1428ca231" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1907.391284] env[63379]: DEBUG oslo_concurrency.lockutils [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "1c983c16-6f86-4932-9698-7fb1428ca231" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1907.542915] env[63379]: DEBUG nova.network.neutron [req-9502a1ed-9272-4c69-99d2-edc262fcfa5d req-da528f2d-d32c-4260-a899-4c199019e580 service nova] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1907.587796] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Releasing lock "refresh_cache-4b419aa8-d4da-45fd-a6da-6f05ee851f2f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1907.587796] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Updated the network info_cache for instance {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10045}} [ 1907.587997] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1907.588183] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1907.588336] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1907.588481] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1907.593751] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None 
req-60643bc3-f627-45be-84c7-94de6d5a0cba tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Volume attach. Driver type: vmdk {{(pid=63379) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1907.594043] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-60643bc3-f627-45be-84c7-94de6d5a0cba tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369498', 'volume_id': '5c8655ca-742a-4de6-850a-911164a51f15', 'name': 'volume-5c8655ca-742a-4de6-850a-911164a51f15', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd3c05ba6-b565-4432-b815-14ae0933853e', 'attached_at': '', 'detached_at': '', 'volume_id': '5c8655ca-742a-4de6-850a-911164a51f15', 'serial': '5c8655ca-742a-4de6-850a-911164a51f15'} {{(pid=63379) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1907.594958] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16312865-6f0c-4851-801d-5717a3bc5053 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.611848] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-492b73e0-ca90-48e2-bba1-b35ac50e0778 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.637645] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-60643bc3-f627-45be-84c7-94de6d5a0cba tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] volume-5c8655ca-742a-4de6-850a-911164a51f15/volume-5c8655ca-742a-4de6-850a-911164a51f15.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1907.638666] env[63379]: DEBUG nova.network.neutron [req-9502a1ed-9272-4c69-99d2-edc262fcfa5d req-da528f2d-d32c-4260-a899-4c199019e580 service nova] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1907.639763] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e0667bdb-0099-4ab9-bec4-a07f35132e53 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.660086] env[63379]: DEBUG oslo_vmware.api [None req-60643bc3-f627-45be-84c7-94de6d5a0cba tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Waiting for the task: (returnval){ [ 1907.660086] env[63379]: value = "task-1780271" [ 1907.660086] env[63379]: _type = "Task" [ 1907.660086] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1907.668392] env[63379]: DEBUG oslo_vmware.api [None req-60643bc3-f627-45be-84c7-94de6d5a0cba tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1780271, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.678282] env[63379]: INFO nova.compute.manager [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Rebuilding instance [ 1907.718891] env[63379]: DEBUG nova.compute.manager [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1907.719791] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b41ed2d-b243-4f46-b35a-0ee835ef3b70 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.730067] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1907.829809] env[63379]: DEBUG oslo_concurrency.lockutils [None req-55fd8c94-e505-465e-b522-be990d52004a tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Lock "14adcb7b-b754-407e-9a99-28a1ca2ede68" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.618s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1907.893639] env[63379]: DEBUG nova.compute.manager [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Starting instance... 
{{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1908.154217] env[63379]: DEBUG oslo_concurrency.lockutils [req-9502a1ed-9272-4c69-99d2-edc262fcfa5d req-da528f2d-d32c-4260-a899-4c199019e580 service nova] Releasing lock "refresh_cache-d4988643-18ff-44c8-8363-e0de43da2abe" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1908.154587] env[63379]: DEBUG oslo_concurrency.lockutils [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquired lock "refresh_cache-d4988643-18ff-44c8-8363-e0de43da2abe" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1908.154751] env[63379]: DEBUG nova.network.neutron [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1908.171258] env[63379]: DEBUG oslo_vmware.api [None req-60643bc3-f627-45be-84c7-94de6d5a0cba tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1780271, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1908.230647] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1908.230922] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-93efd867-7d89-46ab-93f2-36e21256bd08 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.240602] env[63379]: DEBUG oslo_vmware.api [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Waiting for the task: (returnval){ [ 1908.240602] env[63379]: value = "task-1780272" [ 1908.240602] env[63379]: _type = "Task" [ 1908.240602] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1908.248521] env[63379]: DEBUG oslo_vmware.api [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780272, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1908.263202] env[63379]: DEBUG oslo_concurrency.lockutils [None req-52e8076d-7018-4ba3-b9b3-8528bc26e71d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "4b419aa8-d4da-45fd-a6da-6f05ee851f2f" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1908.263433] env[63379]: DEBUG oslo_concurrency.lockutils [None req-52e8076d-7018-4ba3-b9b3-8528bc26e71d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "4b419aa8-d4da-45fd-a6da-6f05ee851f2f" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1908.263626] env[63379]: DEBUG nova.compute.manager [None req-52e8076d-7018-4ba3-b9b3-8528bc26e71d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Going to confirm migration 7 {{(pid=63379) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 1908.412870] env[63379]: DEBUG oslo_concurrency.lockutils [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1908.413160] env[63379]: DEBUG oslo_concurrency.lockutils [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1908.414662] env[63379]: INFO nova.compute.claims [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1908.671017] env[63379]: DEBUG oslo_vmware.api [None req-60643bc3-f627-45be-84c7-94de6d5a0cba tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1780271, 'name': ReconfigVM_Task, 'duration_secs': 0.54019} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1908.671668] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-60643bc3-f627-45be-84c7-94de6d5a0cba tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Reconfigured VM instance instance-00000065 to attach disk [datastore1] volume-5c8655ca-742a-4de6-850a-911164a51f15/volume-5c8655ca-742a-4de6-850a-911164a51f15.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1908.676298] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4f8c5d3f-432b-41da-9aab-203e96c05d1e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.686454] env[63379]: DEBUG nova.network.neutron [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1908.692895] env[63379]: DEBUG oslo_vmware.api [None req-60643bc3-f627-45be-84c7-94de6d5a0cba tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Waiting for the task: (returnval){ [ 1908.692895] env[63379]: value = "task-1780273" [ 1908.692895] env[63379]: _type = "Task" [ 1908.692895] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1908.700549] env[63379]: DEBUG oslo_vmware.api [None req-60643bc3-f627-45be-84c7-94de6d5a0cba tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1780273, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1908.750441] env[63379]: DEBUG oslo_vmware.api [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780272, 'name': PowerOffVM_Task, 'duration_secs': 0.169404} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1908.750782] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1908.750986] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1908.754176] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13ce69ea-a5f8-459f-a53e-a6bf5acb2237 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.761881] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1908.762159] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-29ae604a-db78-452e-a34a-d5b2f86e26fd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.789958] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1908.790401] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1908.790401] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Deleting the datastore file [datastore1] 14adcb7b-b754-407e-9a99-28a1ca2ede68 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1908.790702] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e93cedfd-992f-4dcf-b22a-4e71cffb9727 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.797215] env[63379]: DEBUG oslo_vmware.api [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Waiting for the task: (returnval){ [ 1908.797215] env[63379]: value = "task-1780275" [ 1908.797215] env[63379]: _type = "Task" [ 1908.797215] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1908.808074] env[63379]: DEBUG oslo_vmware.api [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780275, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1908.837320] env[63379]: DEBUG nova.network.neutron [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Updating instance_info_cache with network_info: [{"id": "41bdc6f8-c059-49a5-86a4-a7a03cfe0300", "address": "fa:16:3e:25:56:5d", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41bdc6f8-c0", "ovs_interfaceid": "41bdc6f8-c059-49a5-86a4-a7a03cfe0300", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1908.843360] env[63379]: DEBUG oslo_concurrency.lockutils [None req-52e8076d-7018-4ba3-b9b3-8528bc26e71d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "refresh_cache-4b419aa8-d4da-45fd-a6da-6f05ee851f2f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1908.843550] env[63379]: DEBUG oslo_concurrency.lockutils [None req-52e8076d-7018-4ba3-b9b3-8528bc26e71d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquired lock "refresh_cache-4b419aa8-d4da-45fd-a6da-6f05ee851f2f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1908.843737] env[63379]: DEBUG nova.network.neutron [None req-52e8076d-7018-4ba3-b9b3-8528bc26e71d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1908.843928] env[63379]: DEBUG nova.objects.instance [None req-52e8076d-7018-4ba3-b9b3-8528bc26e71d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lazy-loading 'info_cache' on Instance uuid 4b419aa8-d4da-45fd-a6da-6f05ee851f2f {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 
1909.203984] env[63379]: DEBUG oslo_vmware.api [None req-60643bc3-f627-45be-84c7-94de6d5a0cba tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1780273, 'name': ReconfigVM_Task, 'duration_secs': 0.159282} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1909.204255] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-60643bc3-f627-45be-84c7-94de6d5a0cba tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369498', 'volume_id': '5c8655ca-742a-4de6-850a-911164a51f15', 'name': 'volume-5c8655ca-742a-4de6-850a-911164a51f15', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd3c05ba6-b565-4432-b815-14ae0933853e', 'attached_at': '', 'detached_at': '', 'volume_id': '5c8655ca-742a-4de6-850a-911164a51f15', 'serial': '5c8655ca-742a-4de6-850a-911164a51f15'} {{(pid=63379) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1909.309182] env[63379]: DEBUG oslo_vmware.api [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780275, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.120248} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1909.309452] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1909.309643] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1909.309827] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1909.340734] env[63379]: DEBUG oslo_concurrency.lockutils [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Releasing lock "refresh_cache-d4988643-18ff-44c8-8363-e0de43da2abe" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1909.341018] env[63379]: DEBUG nova.compute.manager [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Instance network_info: |[{"id": "41bdc6f8-c059-49a5-86a4-a7a03cfe0300", "address": "fa:16:3e:25:56:5d", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", 
"subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41bdc6f8-c0", "ovs_interfaceid": "41bdc6f8-c059-49a5-86a4-a7a03cfe0300", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1909.341459] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:25:56:5d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c7d2575f-b92f-44ec-a863-634cb76631a2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '41bdc6f8-c059-49a5-86a4-a7a03cfe0300', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1909.349368] env[63379]: DEBUG oslo.service.loopingcall [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1909.351948] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1909.352575] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0b7a9be5-104e-41ce-8428-2ac6b12dea50 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.372639] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1909.372639] env[63379]: value = "task-1780276" [ 1909.372639] env[63379]: _type = "Task" [ 1909.372639] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1909.380878] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780276, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1909.553776] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-358aa171-e43a-40ff-994a-35a1681380c5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.561788] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bc10ea8-a453-42e2-8bb2-00ee60b3906e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.591368] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35745d23-aaac-4863-b727-06022d3978a1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.599284] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffaff09c-3dcb-46ef-a460-36ba0574e252 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.612637] env[63379]: DEBUG nova.compute.provider_tree [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1909.883442] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780276, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1910.115796] env[63379]: DEBUG nova.scheduler.client.report [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1910.142768] env[63379]: DEBUG nova.network.neutron [None req-52e8076d-7018-4ba3-b9b3-8528bc26e71d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Updating instance_info_cache with network_info: [{"id": "d4e8381c-6eb1-4ebe-a6a3-b89ee2eb423e", "address": "fa:16:3e:f4:8d:6f", "network": {"id": "0dd98be0-5b25-4e45-ac38-4b8d3cd9fc6c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-191573180-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "645f0e0a5e1a44d59ca9c85da49bb454", 
"mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd4e8381c-6e", "ovs_interfaceid": "d4e8381c-6eb1-4ebe-a6a3-b89ee2eb423e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1910.240012] env[63379]: DEBUG nova.objects.instance [None req-60643bc3-f627-45be-84c7-94de6d5a0cba tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lazy-loading 'flavor' on Instance uuid d3c05ba6-b565-4432-b815-14ae0933853e {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1910.345896] env[63379]: DEBUG nova.virt.hardware [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1910.346120] env[63379]: DEBUG nova.virt.hardware [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1910.346292] env[63379]: DEBUG nova.virt.hardware [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1910.346480] env[63379]: DEBUG nova.virt.hardware [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1910.346631] env[63379]: DEBUG nova.virt.hardware [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1910.346835] env[63379]: DEBUG nova.virt.hardware [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1910.346993] env[63379]: DEBUG nova.virt.hardware [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1910.347172] env[63379]: DEBUG nova.virt.hardware [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1910.347342] env[63379]: DEBUG nova.virt.hardware [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1910.347509] env[63379]: DEBUG nova.virt.hardware [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1910.347981] env[63379]: DEBUG nova.virt.hardware [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1910.348615] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84c18fc8-6af4-4bf9-926d-e62620f02dbe {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.356699] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-907afbe8-0560-4d17-8c9d-7b3099c42ec8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.370392] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Instance VIF info [] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1910.376029] env[63379]: DEBUG oslo.service.loopingcall [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1910.376352] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1910.379387] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-12e9078f-e794-4e9f-b4bb-e62d5fd3e0b7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.397369] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780276, 'name': CreateVM_Task, 'duration_secs': 0.684791} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1910.399030] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1910.399030] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1910.399030] env[63379]: value = "task-1780277" [ 1910.399030] env[63379]: _type = "Task" [ 1910.399030] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1910.399684] env[63379]: DEBUG oslo_concurrency.lockutils [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1910.399966] env[63379]: DEBUG oslo_concurrency.lockutils [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1910.400333] env[63379]: DEBUG oslo_concurrency.lockutils [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1910.400627] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9412246-3aa8-4064-878c-49951f28409f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.409276] env[63379]: DEBUG oslo_vmware.api [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1910.409276] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]524bf322-fe68-5426-6d96-6bb28697fc8c" [ 1910.409276] env[63379]: _type = "Task" [ 1910.409276] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1910.414175] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780277, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1910.423207] env[63379]: DEBUG oslo_vmware.api [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]524bf322-fe68-5426-6d96-6bb28697fc8c, 'name': SearchDatastore_Task, 'duration_secs': 0.009647} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1910.423632] env[63379]: DEBUG oslo_concurrency.lockutils [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1910.423717] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1910.423951] env[63379]: DEBUG oslo_concurrency.lockutils [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1910.424120] env[63379]: DEBUG oslo_concurrency.lockutils [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1910.424302] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1910.424561] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-11ff5d20-aa53-4fde-a376-6255ffb66df1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.433486] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1910.433705] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1910.434586] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c94ef77b-3ad1-48af-a737-a4c01a99fe93 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.440568] env[63379]: DEBUG oslo_vmware.api [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1910.440568] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]523dde80-21f2-6dc2-91c5-beb28e220dbe" [ 1910.440568] env[63379]: _type = "Task" [ 1910.440568] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1910.449199] env[63379]: DEBUG oslo_vmware.api [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]523dde80-21f2-6dc2-91c5-beb28e220dbe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1910.621433] env[63379]: DEBUG oslo_concurrency.lockutils [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.208s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1910.622101] env[63379]: DEBUG nova.compute.manager [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1910.645451] env[63379]: DEBUG oslo_concurrency.lockutils [None req-52e8076d-7018-4ba3-b9b3-8528bc26e71d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Releasing lock "refresh_cache-4b419aa8-d4da-45fd-a6da-6f05ee851f2f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1910.646036] env[63379]: DEBUG nova.objects.instance [None req-52e8076d-7018-4ba3-b9b3-8528bc26e71d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lazy-loading 'migration_context' on Instance uuid 4b419aa8-d4da-45fd-a6da-6f05ee851f2f {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1910.745355] env[63379]: DEBUG oslo_concurrency.lockutils [None req-60643bc3-f627-45be-84c7-94de6d5a0cba tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lock "d3c05ba6-b565-4432-b815-14ae0933853e" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.756s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1910.912919] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780277, 'name': CreateVM_Task, 'duration_secs': 0.252859} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1910.913276] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1910.913547] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1910.914180] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1910.914180] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1910.914393] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f05da8f-37d2-4132-a64a-427bbe27a9c8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.919474] env[63379]: DEBUG oslo_vmware.api [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Waiting for the task: (returnval){ [ 1910.919474] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b0632a-8873-4d71-cdaa-08b3c66ece6a" [ 1910.919474] env[63379]: _type = "Task" [ 1910.919474] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1910.927746] env[63379]: DEBUG oslo_vmware.api [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b0632a-8873-4d71-cdaa-08b3c66ece6a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1910.952213] env[63379]: DEBUG oslo_vmware.api [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]523dde80-21f2-6dc2-91c5-beb28e220dbe, 'name': SearchDatastore_Task, 'duration_secs': 0.008438} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1910.953228] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ca8ea02-84d2-4d40-bf9a-7f982271c179 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.958989] env[63379]: DEBUG oslo_vmware.api [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1910.958989] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d13504-8134-f3f6-89b6-00ecc81effa2" [ 1910.958989] env[63379]: _type = "Task" [ 1910.958989] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1910.967400] env[63379]: DEBUG oslo_vmware.api [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d13504-8134-f3f6-89b6-00ecc81effa2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1911.128037] env[63379]: DEBUG nova.compute.utils [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1911.129298] env[63379]: DEBUG nova.compute.manager [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1911.129410] env[63379]: DEBUG nova.network.neutron [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1911.149134] env[63379]: DEBUG nova.objects.base [None req-52e8076d-7018-4ba3-b9b3-8528bc26e71d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Object Instance<4b419aa8-d4da-45fd-a6da-6f05ee851f2f> lazy-loaded attributes: info_cache,migration_context {{(pid=63379) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1911.150663] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8181c4df-061f-4b75-916d-cee8d8f75df1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.169850] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac1d4b35-9357-4356-9719-31daac76221d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.177201] env[63379]: DEBUG oslo_vmware.api [None req-52e8076d-7018-4ba3-b9b3-8528bc26e71d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 1911.177201] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]522e2dd1-aaaa-dbbd-34ed-081fe3ddd1d8" [ 1911.177201] env[63379]: _type = "Task" [ 1911.177201] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1911.185280] env[63379]: DEBUG oslo_vmware.api [None req-52e8076d-7018-4ba3-b9b3-8528bc26e71d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]522e2dd1-aaaa-dbbd-34ed-081fe3ddd1d8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1911.202452] env[63379]: DEBUG nova.policy [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'deef4f9ae0754a6c8a7f673c10a76408', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8c01c5c8c3734c4ea066324e542e7374', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1911.430518] env[63379]: DEBUG oslo_vmware.api [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b0632a-8873-4d71-cdaa-08b3c66ece6a, 'name': SearchDatastore_Task, 'duration_secs': 0.010092} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1911.430864] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1911.431098] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1911.431326] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1911.469289] env[63379]: DEBUG oslo_vmware.api [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d13504-8134-f3f6-89b6-00ecc81effa2, 'name': SearchDatastore_Task, 'duration_secs': 0.010122} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1911.469604] env[63379]: DEBUG oslo_concurrency.lockutils [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1911.469875] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] d4988643-18ff-44c8-8363-e0de43da2abe/d4988643-18ff-44c8-8363-e0de43da2abe.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1911.470202] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1911.470398] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1911.470639] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6323207e-cc13-4a8b-9d28-61702d6ff0fe {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.473429] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-06a04c2d-28ef-4191-950e-b30370be491c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.480297] env[63379]: DEBUG oslo_vmware.api [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1911.480297] env[63379]: value = "task-1780278" [ 1911.480297] env[63379]: _type = "Task" [ 1911.480297] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1911.490236] env[63379]: DEBUG oslo_vmware.api [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780278, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1911.502820] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1911.503168] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1911.503979] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a131b45-ae01-4778-9da3-b50a9b7e48c3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.516022] env[63379]: DEBUG oslo_vmware.api [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Waiting for the task: (returnval){ [ 1911.516022] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b140f4-65fa-dace-a240-d034492f90a4" [ 1911.516022] env[63379]: _type = "Task" [ 1911.516022] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1911.524338] env[63379]: DEBUG oslo_vmware.api [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b140f4-65fa-dace-a240-d034492f90a4, 'name': SearchDatastore_Task, 'duration_secs': 0.008506} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1911.525283] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00a2b799-ed8c-4445-ba31-f51d6d941bd6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.528480] env[63379]: DEBUG nova.network.neutron [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Successfully created port: 62d6fce2-bf52-422e-8166-344c4fd61274 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1911.533797] env[63379]: DEBUG oslo_vmware.api [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Waiting for the task: (returnval){ [ 1911.533797] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d55f35-a6ec-9464-75d2-94d016dc3dfc" [ 1911.533797] env[63379]: _type = "Task" [ 1911.533797] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1911.541844] env[63379]: DEBUG oslo_vmware.api [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d55f35-a6ec-9464-75d2-94d016dc3dfc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1911.632906] env[63379]: DEBUG nova.compute.manager [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1911.692060] env[63379]: DEBUG oslo_vmware.api [None req-52e8076d-7018-4ba3-b9b3-8528bc26e71d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]522e2dd1-aaaa-dbbd-34ed-081fe3ddd1d8, 'name': SearchDatastore_Task, 'duration_secs': 0.008122} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1911.692501] env[63379]: DEBUG oslo_concurrency.lockutils [None req-52e8076d-7018-4ba3-b9b3-8528bc26e71d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1911.692824] env[63379]: DEBUG oslo_concurrency.lockutils [None req-52e8076d-7018-4ba3-b9b3-8528bc26e71d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1911.990507] env[63379]: DEBUG oslo_vmware.api [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780278, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.493988} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1911.990941] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] d4988643-18ff-44c8-8363-e0de43da2abe/d4988643-18ff-44c8-8363-e0de43da2abe.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1911.990995] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1911.991279] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-429ba7af-6df7-44fb-82d7-24e15a71f75f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.998122] env[63379]: DEBUG oslo_vmware.api [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1911.998122] env[63379]: value = "task-1780279" [ 1911.998122] env[63379]: _type = "Task" [ 1911.998122] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1912.005649] env[63379]: DEBUG oslo_vmware.api [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780279, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1912.045410] env[63379]: DEBUG oslo_vmware.api [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d55f35-a6ec-9464-75d2-94d016dc3dfc, 'name': SearchDatastore_Task, 'duration_secs': 0.009019} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1912.045528] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1912.045782] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 14adcb7b-b754-407e-9a99-28a1ca2ede68/14adcb7b-b754-407e-9a99-28a1ca2ede68.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1912.046057] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9349d305-558f-4c09-9363-7a380f4ed863 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.052820] env[63379]: DEBUG oslo_vmware.api [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Waiting for the task: (returnval){ [ 1912.052820] env[63379]: value = "task-1780280" [ 1912.052820] env[63379]: _type = "Task" [ 1912.052820] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1912.061483] env[63379]: DEBUG oslo_vmware.api [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780280, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1912.110906] env[63379]: DEBUG oslo_concurrency.lockutils [None req-172f51cd-0fa5-478e-a0a4-24498417f1ac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Acquiring lock "d3c05ba6-b565-4432-b815-14ae0933853e" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1912.111182] env[63379]: DEBUG oslo_concurrency.lockutils [None req-172f51cd-0fa5-478e-a0a4-24498417f1ac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lock "d3c05ba6-b565-4432-b815-14ae0933853e" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1912.349367] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f582230f-ab37-44ad-a68a-c0388630eef5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.358471] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63e676ce-9317-48b9-9285-87fc14cca217 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.391115] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8433b796-4b07-4e45-a235-b1a2797e3967 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.399615] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37f38ed6-8b75-4853-810b-ed37cd9c6dd5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.416402] env[63379]: DEBUG nova.compute.provider_tree [None req-52e8076d-7018-4ba3-b9b3-8528bc26e71d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1912.509138] env[63379]: DEBUG oslo_vmware.api [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780279, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066141} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1912.509481] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1912.510381] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86fd7599-5a80-48fc-a5ce-c593c0970415 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.533322] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] d4988643-18ff-44c8-8363-e0de43da2abe/d4988643-18ff-44c8-8363-e0de43da2abe.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1912.533631] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cae602a0-956d-47e6-8bf7-4357066dc094 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.552644] env[63379]: DEBUG oslo_vmware.api [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1912.552644] env[63379]: value = "task-1780281" [ 1912.552644] env[63379]: _type = "Task" [ 1912.552644] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1912.563065] env[63379]: DEBUG oslo_vmware.api [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780281, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1912.565944] env[63379]: DEBUG oslo_vmware.api [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780280, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.464475} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1912.566197] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 14adcb7b-b754-407e-9a99-28a1ca2ede68/14adcb7b-b754-407e-9a99-28a1ca2ede68.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1912.566412] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1912.566650] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-838c2c8c-0719-4d7e-b344-4019e8d618bf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.572041] env[63379]: DEBUG oslo_vmware.api [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Waiting for the task: (returnval){ [ 1912.572041] env[63379]: value = "task-1780282" [ 1912.572041] env[63379]: _type = "Task" [ 1912.572041] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1912.579303] env[63379]: DEBUG oslo_vmware.api [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780282, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1912.614752] env[63379]: DEBUG nova.compute.utils [None req-172f51cd-0fa5-478e-a0a4-24498417f1ac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1912.642933] env[63379]: DEBUG nova.compute.manager [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1912.670740] env[63379]: DEBUG nova.virt.hardware [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1912.671007] env[63379]: DEBUG nova.virt.hardware [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1912.671183] env[63379]: DEBUG nova.virt.hardware [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1912.671377] env[63379]: DEBUG nova.virt.hardware [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1912.671532] env[63379]: DEBUG nova.virt.hardware [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1912.671687] env[63379]: DEBUG nova.virt.hardware [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1912.671900] env[63379]: DEBUG nova.virt.hardware [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1912.672076] env[63379]: DEBUG nova.virt.hardware [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1912.672250] env[63379]: DEBUG nova.virt.hardware [None 
req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1912.672417] env[63379]: DEBUG nova.virt.hardware [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1912.672599] env[63379]: DEBUG nova.virt.hardware [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1912.673521] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0db08f81-b1f0-475a-969a-da1c65aee72f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.681189] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05fe2b6e-068d-448e-9bc5-983a28139cbd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.920772] env[63379]: DEBUG nova.scheduler.client.report [None req-52e8076d-7018-4ba3-b9b3-8528bc26e71d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1913.063224] env[63379]: DEBUG oslo_vmware.api [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780281, 'name': ReconfigVM_Task, 'duration_secs': 0.304244} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1913.063591] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Reconfigured VM instance instance-0000006a to attach disk [datastore1] d4988643-18ff-44c8-8363-e0de43da2abe/d4988643-18ff-44c8-8363-e0de43da2abe.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1913.064169] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-199fd0e9-0905-4ddf-b01d-dde3ca74aadc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.070447] env[63379]: DEBUG oslo_vmware.api [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1913.070447] env[63379]: value = "task-1780283" [ 1913.070447] env[63379]: _type = "Task" [ 1913.070447] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1913.084677] env[63379]: DEBUG oslo_vmware.api [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780282, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082492} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1913.084887] env[63379]: DEBUG oslo_vmware.api [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780283, 'name': Rename_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1913.085149] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1913.085865] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b164db1d-2bda-4f7c-a95d-449df6670c74 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.104563] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] 14adcb7b-b754-407e-9a99-28a1ca2ede68/14adcb7b-b754-407e-9a99-28a1ca2ede68.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1913.104801] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-baa13287-b572-4fbe-9a0d-4a9c5c3e4a24 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.118629] env[63379]: DEBUG oslo_concurrency.lockutils [None req-172f51cd-0fa5-478e-a0a4-24498417f1ac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lock "d3c05ba6-b565-4432-b815-14ae0933853e" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1913.124489] env[63379]: DEBUG oslo_vmware.api [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Waiting for the task: (returnval){ [ 1913.124489] env[63379]: value = "task-1780284" [ 1913.124489] env[63379]: _type = "Task" [ 1913.124489] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1913.132036] env[63379]: DEBUG oslo_vmware.api [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780284, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1913.504908] env[63379]: DEBUG nova.compute.manager [req-33798672-0ecd-4ade-8ced-585ebb24fb98 req-b97a5bbd-85d0-46b4-8aba-cd4a9662ced4 service nova] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Received event network-vif-plugged-62d6fce2-bf52-422e-8166-344c4fd61274 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1913.505158] env[63379]: DEBUG oslo_concurrency.lockutils [req-33798672-0ecd-4ade-8ced-585ebb24fb98 req-b97a5bbd-85d0-46b4-8aba-cd4a9662ced4 service nova] Acquiring lock "1c983c16-6f86-4932-9698-7fb1428ca231-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1913.505377] env[63379]: DEBUG oslo_concurrency.lockutils [req-33798672-0ecd-4ade-8ced-585ebb24fb98 req-b97a5bbd-85d0-46b4-8aba-cd4a9662ced4 service nova] Lock "1c983c16-6f86-4932-9698-7fb1428ca231-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1913.505554] env[63379]: DEBUG oslo_concurrency.lockutils [req-33798672-0ecd-4ade-8ced-585ebb24fb98 req-b97a5bbd-85d0-46b4-8aba-cd4a9662ced4 service nova] Lock "1c983c16-6f86-4932-9698-7fb1428ca231-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1913.505762] env[63379]: DEBUG nova.compute.manager [req-33798672-0ecd-4ade-8ced-585ebb24fb98 req-b97a5bbd-85d0-46b4-8aba-cd4a9662ced4 service nova] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] No waiting events found dispatching network-vif-plugged-62d6fce2-bf52-422e-8166-344c4fd61274 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1913.505941] env[63379]: WARNING nova.compute.manager [req-33798672-0ecd-4ade-8ced-585ebb24fb98 req-b97a5bbd-85d0-46b4-8aba-cd4a9662ced4 service nova] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Received unexpected event network-vif-plugged-62d6fce2-bf52-422e-8166-344c4fd61274 for instance with vm_state building and task_state spawning. [ 1913.581641] env[63379]: DEBUG oslo_vmware.api [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780283, 'name': Rename_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1913.636268] env[63379]: DEBUG oslo_vmware.api [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780284, 'name': ReconfigVM_Task, 'duration_secs': 0.25811} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1913.636268] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Reconfigured VM instance instance-00000069 to attach disk [datastore1] 14adcb7b-b754-407e-9a99-28a1ca2ede68/14adcb7b-b754-407e-9a99-28a1ca2ede68.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1913.636268] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e6b887a3-5ff4-4ff1-9ceb-2ff7f9f09f05 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.637597] env[63379]: DEBUG nova.network.neutron [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Successfully updated port: 62d6fce2-bf52-422e-8166-344c4fd61274 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1913.645793] env[63379]: DEBUG oslo_vmware.api [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Waiting for the task: (returnval){ [ 1913.645793] env[63379]: value = "task-1780285" [ 1913.645793] env[63379]: _type = "Task" [ 1913.645793] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1913.658976] env[63379]: DEBUG oslo_vmware.api [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780285, 'name': Rename_Task} progress is 10%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1913.931572] env[63379]: DEBUG oslo_concurrency.lockutils [None req-52e8076d-7018-4ba3-b9b3-8528bc26e71d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.239s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1913.931831] env[63379]: DEBUG nova.compute.manager [None req-52e8076d-7018-4ba3-b9b3-8528bc26e71d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Resized/migrated instance is powered off. Setting vm_state to 'stopped'. {{(pid=63379) _confirm_resize /opt/stack/nova/nova/compute/manager.py:4910}} [ 1914.081883] env[63379]: DEBUG oslo_vmware.api [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780283, 'name': Rename_Task, 'duration_secs': 0.863625} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1914.082247] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1914.082434] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c630fa7b-f8ee-481a-ad8d-de210c1f14c7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.088330] env[63379]: DEBUG oslo_vmware.api [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1914.088330] env[63379]: value = "task-1780286" [ 1914.088330] env[63379]: _type = "Task" [ 1914.088330] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1914.096127] env[63379]: DEBUG oslo_vmware.api [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780286, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1914.141975] env[63379]: DEBUG oslo_concurrency.lockutils [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "refresh_cache-1c983c16-6f86-4932-9698-7fb1428ca231" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1914.142211] env[63379]: DEBUG oslo_concurrency.lockutils [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquired lock "refresh_cache-1c983c16-6f86-4932-9698-7fb1428ca231" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1914.142419] env[63379]: DEBUG nova.network.neutron [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1914.155936] env[63379]: DEBUG oslo_vmware.api [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780285, 'name': Rename_Task, 'duration_secs': 0.134004} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1914.156211] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1914.156457] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dffa83f2-d26f-4ec8-a185-7fd89842b436 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.163611] env[63379]: DEBUG oslo_vmware.api [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Waiting for the task: (returnval){ [ 1914.163611] env[63379]: value = "task-1780287" [ 1914.163611] env[63379]: _type = "Task" [ 1914.163611] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1914.172534] env[63379]: DEBUG oslo_vmware.api [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780287, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1914.193699] env[63379]: DEBUG oslo_concurrency.lockutils [None req-172f51cd-0fa5-478e-a0a4-24498417f1ac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Acquiring lock "d3c05ba6-b565-4432-b815-14ae0933853e" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1914.193948] env[63379]: DEBUG oslo_concurrency.lockutils [None req-172f51cd-0fa5-478e-a0a4-24498417f1ac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lock "d3c05ba6-b565-4432-b815-14ae0933853e" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1914.194197] env[63379]: INFO nova.compute.manager [None req-172f51cd-0fa5-478e-a0a4-24498417f1ac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Attaching volume dec1aeed-ba0d-4ad4-9dfc-ec071c6b051c to /dev/sdc [ 1914.227050] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3ba0991-45d8-4b9f-b8e0-f0ec09e5d4bb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.234230] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9861d663-49a5-4d49-a7e3-f9bd03e6da93 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.246850] env[63379]: DEBUG nova.virt.block_device [None req-172f51cd-0fa5-478e-a0a4-24498417f1ac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Updating existing volume attachment 
record: de8efdde-7106-4d9a-8a75-55607347532e {{(pid=63379) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1914.490796] env[63379]: INFO nova.scheduler.client.report [None req-52e8076d-7018-4ba3-b9b3-8528bc26e71d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Deleted allocation for migration a9afaa25-c843-4661-9b08-5f138ce82641 [ 1914.598648] env[63379]: DEBUG oslo_vmware.api [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780286, 'name': PowerOnVM_Task, 'duration_secs': 0.460052} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1914.600054] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1914.600054] env[63379]: INFO nova.compute.manager [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Took 8.71 seconds to spawn the instance on the hypervisor. [ 1914.600054] env[63379]: DEBUG nova.compute.manager [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1914.600641] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7129e9a-02c7-402a-9f10-ec15a69309b6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.674543] env[63379]: DEBUG oslo_vmware.api [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780287, 'name': PowerOnVM_Task, 'duration_secs': 0.437591} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1914.674835] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1914.675064] env[63379]: DEBUG nova.compute.manager [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1914.675875] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f52e252e-7551-47a5-ac40-330a5f349c2a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.696551] env[63379]: DEBUG nova.network.neutron [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1914.866689] env[63379]: DEBUG nova.network.neutron [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Updating instance_info_cache with network_info: [{"id": "62d6fce2-bf52-422e-8166-344c4fd61274", "address": "fa:16:3e:fe:3b:ec", "network": {"id": "c67e6fb1-ba3e-4494-b459-ecd555f3bf64", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1864563188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c01c5c8c3734c4ea066324e542e7374", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6934071-bf85-4591-9c7d-55c7ea131262", "external-id": "nsx-vlan-transportzone-452", "segmentation_id": 452, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62d6fce2-bf", "ovs_interfaceid": "62d6fce2-bf52-422e-8166-344c4fd61274", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1914.997502] env[63379]: DEBUG oslo_concurrency.lockutils [None req-52e8076d-7018-4ba3-b9b3-8528bc26e71d tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "4b419aa8-d4da-45fd-a6da-6f05ee851f2f" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.734s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1915.120304] env[63379]: INFO nova.compute.manager [None 
req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Took 16.35 seconds to build instance. [ 1915.194805] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1915.195076] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1915.195265] env[63379]: DEBUG nova.objects.instance [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63379) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1915.369827] env[63379]: DEBUG oslo_concurrency.lockutils [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Releasing lock "refresh_cache-1c983c16-6f86-4932-9698-7fb1428ca231" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1915.370215] env[63379]: DEBUG nova.compute.manager [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Instance network_info: |[{"id": "62d6fce2-bf52-422e-8166-344c4fd61274", "address": "fa:16:3e:fe:3b:ec", "network": {"id": "c67e6fb1-ba3e-4494-b459-ecd555f3bf64", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1864563188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c01c5c8c3734c4ea066324e542e7374", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6934071-bf85-4591-9c7d-55c7ea131262", "external-id": "nsx-vlan-transportzone-452", "segmentation_id": 452, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62d6fce2-bf", "ovs_interfaceid": "62d6fce2-bf52-422e-8166-344c4fd61274", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1915.370644] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 
tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fe:3b:ec', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c6934071-bf85-4591-9c7d-55c7ea131262', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '62d6fce2-bf52-422e-8166-344c4fd61274', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1915.378070] env[63379]: DEBUG oslo.service.loopingcall [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1915.378274] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1915.378496] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-98445b20-ac7f-4b65-b6d4-0d9c9d73405b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.398805] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1915.398805] env[63379]: value = "task-1780289" [ 1915.398805] env[63379]: _type = "Task" [ 1915.398805] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1915.407738] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780289, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1915.420017] env[63379]: DEBUG nova.objects.instance [None req-1db8ced8-f340-4257-846c-980b8dc57299 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lazy-loading 'flavor' on Instance uuid 4b419aa8-d4da-45fd-a6da-6f05ee851f2f {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1915.522451] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9486a7b5-7f0c-4d48-9ad7-8f8e810d0b99 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Acquiring lock "14adcb7b-b754-407e-9a99-28a1ca2ede68" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1915.522804] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9486a7b5-7f0c-4d48-9ad7-8f8e810d0b99 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Lock "14adcb7b-b754-407e-9a99-28a1ca2ede68" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1915.523075] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9486a7b5-7f0c-4d48-9ad7-8f8e810d0b99 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Acquiring lock "14adcb7b-b754-407e-9a99-28a1ca2ede68-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1915.523310] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9486a7b5-7f0c-4d48-9ad7-8f8e810d0b99 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Lock "14adcb7b-b754-407e-9a99-28a1ca2ede68-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1915.523496] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9486a7b5-7f0c-4d48-9ad7-8f8e810d0b99 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Lock "14adcb7b-b754-407e-9a99-28a1ca2ede68-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1915.525625] env[63379]: INFO nova.compute.manager [None req-9486a7b5-7f0c-4d48-9ad7-8f8e810d0b99 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Terminating instance [ 1915.527344] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9486a7b5-7f0c-4d48-9ad7-8f8e810d0b99 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Acquiring lock "refresh_cache-14adcb7b-b754-407e-9a99-28a1ca2ede68" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1915.527438] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9486a7b5-7f0c-4d48-9ad7-8f8e810d0b99 tempest-ServerShowV247Test-1573948792 
tempest-ServerShowV247Test-1573948792-project-member] Acquired lock "refresh_cache-14adcb7b-b754-407e-9a99-28a1ca2ede68" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1915.527584] env[63379]: DEBUG nova.network.neutron [None req-9486a7b5-7f0c-4d48-9ad7-8f8e810d0b99 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1915.530497] env[63379]: DEBUG nova.compute.manager [req-5ef32672-7556-48dc-95c1-e4ff1f30a0e5 req-0dedb774-7f5a-4115-aca5-162679dd1f5c service nova] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Received event network-changed-62d6fce2-bf52-422e-8166-344c4fd61274 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1915.530680] env[63379]: DEBUG nova.compute.manager [req-5ef32672-7556-48dc-95c1-e4ff1f30a0e5 req-0dedb774-7f5a-4115-aca5-162679dd1f5c service nova] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Refreshing instance network info cache due to event network-changed-62d6fce2-bf52-422e-8166-344c4fd61274. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1915.530874] env[63379]: DEBUG oslo_concurrency.lockutils [req-5ef32672-7556-48dc-95c1-e4ff1f30a0e5 req-0dedb774-7f5a-4115-aca5-162679dd1f5c service nova] Acquiring lock "refresh_cache-1c983c16-6f86-4932-9698-7fb1428ca231" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1915.531085] env[63379]: DEBUG oslo_concurrency.lockutils [req-5ef32672-7556-48dc-95c1-e4ff1f30a0e5 req-0dedb774-7f5a-4115-aca5-162679dd1f5c service nova] Acquired lock "refresh_cache-1c983c16-6f86-4932-9698-7fb1428ca231" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1915.531356] env[63379]: DEBUG nova.network.neutron [req-5ef32672-7556-48dc-95c1-e4ff1f30a0e5 req-0dedb774-7f5a-4115-aca5-162679dd1f5c service nova] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Refreshing network info cache for port 62d6fce2-bf52-422e-8166-344c4fd61274 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1915.621945] env[63379]: DEBUG oslo_concurrency.lockutils [None req-adcd655b-c691-44ae-a41d-8e42fbac88bf tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "d4988643-18ff-44c8-8363-e0de43da2abe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.862s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1915.731022] env[63379]: DEBUG nova.compute.manager [req-66ad73bb-28a8-45f3-ae1e-47482109820a req-da6b5596-c9e3-4532-94ea-dd1d53abe333 service nova] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Received event network-changed-4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1915.731255] env[63379]: DEBUG nova.compute.manager [req-66ad73bb-28a8-45f3-ae1e-47482109820a req-da6b5596-c9e3-4532-94ea-dd1d53abe333 service nova] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Refreshing instance network info cache due to event network-changed-4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1915.731465] env[63379]: DEBUG oslo_concurrency.lockutils [req-66ad73bb-28a8-45f3-ae1e-47482109820a req-da6b5596-c9e3-4532-94ea-dd1d53abe333 service nova] Acquiring lock "refresh_cache-48c17c3b-1197-46cb-a0f7-3671b2d82c7e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1915.731618] env[63379]: DEBUG oslo_concurrency.lockutils [req-66ad73bb-28a8-45f3-ae1e-47482109820a req-da6b5596-c9e3-4532-94ea-dd1d53abe333 service nova] Acquired lock "refresh_cache-48c17c3b-1197-46cb-a0f7-3671b2d82c7e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1915.731892] env[63379]: DEBUG nova.network.neutron [req-66ad73bb-28a8-45f3-ae1e-47482109820a req-da6b5596-c9e3-4532-94ea-dd1d53abe333 service nova] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Refreshing network info cache for port 4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1915.909233] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780289, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1915.925032] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1db8ced8-f340-4257-846c-980b8dc57299 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "refresh_cache-4b419aa8-d4da-45fd-a6da-6f05ee851f2f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1915.925216] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1db8ced8-f340-4257-846c-980b8dc57299 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquired lock "refresh_cache-4b419aa8-d4da-45fd-a6da-6f05ee851f2f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1915.925441] env[63379]: DEBUG nova.network.neutron [None req-1db8ced8-f340-4257-846c-980b8dc57299 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1915.925692] env[63379]: DEBUG nova.objects.instance [None req-1db8ced8-f340-4257-846c-980b8dc57299 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lazy-loading 'info_cache' on Instance uuid 4b419aa8-d4da-45fd-a6da-6f05ee851f2f {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1916.051993] env[63379]: DEBUG nova.network.neutron [None req-9486a7b5-7f0c-4d48-9ad7-8f8e810d0b99 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Instance cache missing network info. 
{{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1916.112340] env[63379]: DEBUG nova.network.neutron [None req-9486a7b5-7f0c-4d48-9ad7-8f8e810d0b99 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1916.203602] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cc490d10-228c-4058-b124-445d1b3ca7b1 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.008s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1916.356939] env[63379]: DEBUG nova.network.neutron [req-5ef32672-7556-48dc-95c1-e4ff1f30a0e5 req-0dedb774-7f5a-4115-aca5-162679dd1f5c service nova] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Updated VIF entry in instance network info cache for port 62d6fce2-bf52-422e-8166-344c4fd61274. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1916.357369] env[63379]: DEBUG nova.network.neutron [req-5ef32672-7556-48dc-95c1-e4ff1f30a0e5 req-0dedb774-7f5a-4115-aca5-162679dd1f5c service nova] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Updating instance_info_cache with network_info: [{"id": "62d6fce2-bf52-422e-8166-344c4fd61274", "address": "fa:16:3e:fe:3b:ec", "network": {"id": "c67e6fb1-ba3e-4494-b459-ecd555f3bf64", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1864563188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c01c5c8c3734c4ea066324e542e7374", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6934071-bf85-4591-9c7d-55c7ea131262", "external-id": "nsx-vlan-transportzone-452", "segmentation_id": 452, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62d6fce2-bf", "ovs_interfaceid": "62d6fce2-bf52-422e-8166-344c4fd61274", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1916.410667] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780289, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1916.430312] env[63379]: DEBUG nova.objects.base [None req-1db8ced8-f340-4257-846c-980b8dc57299 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Object Instance<4b419aa8-d4da-45fd-a6da-6f05ee851f2f> lazy-loaded attributes: flavor,info_cache {{(pid=63379) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1916.615379] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9486a7b5-7f0c-4d48-9ad7-8f8e810d0b99 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Releasing lock "refresh_cache-14adcb7b-b754-407e-9a99-28a1ca2ede68" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1916.615914] env[63379]: DEBUG nova.compute.manager [None req-9486a7b5-7f0c-4d48-9ad7-8f8e810d0b99 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1916.616138] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9486a7b5-7f0c-4d48-9ad7-8f8e810d0b99 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1916.617013] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-207a9ec7-81fd-423b-8013-5cfefaebf7f2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.626569] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-9486a7b5-7f0c-4d48-9ad7-8f8e810d0b99 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1916.626569] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b332dc2a-c472-4cdb-a4d8-3860ee2812f0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.633532] env[63379]: DEBUG oslo_vmware.api [None req-9486a7b5-7f0c-4d48-9ad7-8f8e810d0b99 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Waiting for the task: (returnval){ [ 1916.633532] env[63379]: value = "task-1780291" [ 1916.633532] env[63379]: _type = "Task" [ 1916.633532] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1916.641481] env[63379]: DEBUG oslo_vmware.api [None req-9486a7b5-7f0c-4d48-9ad7-8f8e810d0b99 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780291, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1916.719159] env[63379]: DEBUG nova.network.neutron [req-66ad73bb-28a8-45f3-ae1e-47482109820a req-da6b5596-c9e3-4532-94ea-dd1d53abe333 service nova] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Updated VIF entry in instance network info cache for port 4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1916.719841] env[63379]: DEBUG nova.network.neutron [req-66ad73bb-28a8-45f3-ae1e-47482109820a req-da6b5596-c9e3-4532-94ea-dd1d53abe333 service nova] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Updating instance_info_cache with network_info: [{"id": "4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191", "address": "fa:16:3e:c6:a7:ff", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.211", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b8a4c15-27", "ovs_interfaceid": "4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1916.860736] env[63379]: DEBUG oslo_concurrency.lockutils [req-5ef32672-7556-48dc-95c1-e4ff1f30a0e5 req-0dedb774-7f5a-4115-aca5-162679dd1f5c service nova] Releasing lock "refresh_cache-1c983c16-6f86-4932-9698-7fb1428ca231" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1916.910806] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780289, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.143313] env[63379]: DEBUG oslo_vmware.api [None req-9486a7b5-7f0c-4d48-9ad7-8f8e810d0b99 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780291, 'name': PowerOffVM_Task, 'duration_secs': 0.121103} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1917.143643] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-9486a7b5-7f0c-4d48-9ad7-8f8e810d0b99 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1917.143841] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9486a7b5-7f0c-4d48-9ad7-8f8e810d0b99 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1917.144198] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fda67436-d13d-4099-86d2-9f164150cd19 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.150197] env[63379]: DEBUG nova.network.neutron [None req-1db8ced8-f340-4257-846c-980b8dc57299 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Updating instance_info_cache with network_info: [{"id": "d4e8381c-6eb1-4ebe-a6a3-b89ee2eb423e", "address": "fa:16:3e:f4:8d:6f", "network": {"id": "0dd98be0-5b25-4e45-ac38-4b8d3cd9fc6c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-191573180-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "645f0e0a5e1a44d59ca9c85da49bb454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd4e8381c-6e", "ovs_interfaceid": "d4e8381c-6eb1-4ebe-a6a3-b89ee2eb423e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1917.171658] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9486a7b5-7f0c-4d48-9ad7-8f8e810d0b99 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1917.171896] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9486a7b5-7f0c-4d48-9ad7-8f8e810d0b99 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1917.172097] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-9486a7b5-7f0c-4d48-9ad7-8f8e810d0b99 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Deleting the datastore file [datastore1] 14adcb7b-b754-407e-9a99-28a1ca2ede68 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1917.172372] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-be62c8ba-d191-4711-b896-551b203dc42c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.179798] env[63379]: DEBUG oslo_vmware.api [None req-9486a7b5-7f0c-4d48-9ad7-8f8e810d0b99 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Waiting for the task: (returnval){ [ 1917.179798] env[63379]: value = "task-1780293" [ 1917.179798] env[63379]: _type = "Task" [ 1917.179798] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1917.188468] env[63379]: DEBUG oslo_vmware.api [None req-9486a7b5-7f0c-4d48-9ad7-8f8e810d0b99 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780293, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.222882] env[63379]: DEBUG oslo_concurrency.lockutils [req-66ad73bb-28a8-45f3-ae1e-47482109820a req-da6b5596-c9e3-4532-94ea-dd1d53abe333 service nova] Releasing lock "refresh_cache-48c17c3b-1197-46cb-a0f7-3671b2d82c7e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1917.411497] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780289, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.654042] env[63379]: DEBUG oslo_concurrency.lockutils [None req-1db8ced8-f340-4257-846c-980b8dc57299 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Releasing lock "refresh_cache-4b419aa8-d4da-45fd-a6da-6f05ee851f2f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1917.690028] env[63379]: DEBUG oslo_vmware.api [None req-9486a7b5-7f0c-4d48-9ad7-8f8e810d0b99 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780293, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.091961} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1917.690028] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-9486a7b5-7f0c-4d48-9ad7-8f8e810d0b99 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1917.690028] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9486a7b5-7f0c-4d48-9ad7-8f8e810d0b99 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1917.690028] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9486a7b5-7f0c-4d48-9ad7-8f8e810d0b99 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1917.690028] env[63379]: INFO nova.compute.manager [None req-9486a7b5-7f0c-4d48-9ad7-8f8e810d0b99 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Took 1.07 seconds to destroy the instance on the hypervisor. [ 1917.690292] env[63379]: DEBUG oslo.service.loopingcall [None req-9486a7b5-7f0c-4d48-9ad7-8f8e810d0b99 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1917.690359] env[63379]: DEBUG nova.compute.manager [-] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1917.690454] env[63379]: DEBUG nova.network.neutron [-] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1917.705252] env[63379]: DEBUG nova.network.neutron [-] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1917.761120] env[63379]: DEBUG nova.compute.manager [req-3570f678-5636-441b-ad23-ba1de1602b3a req-6d3b01ac-c896-49f8-add7-88b432f0b34a service nova] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Received event network-changed-41bdc6f8-c059-49a5-86a4-a7a03cfe0300 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1917.761333] env[63379]: DEBUG nova.compute.manager [req-3570f678-5636-441b-ad23-ba1de1602b3a req-6d3b01ac-c896-49f8-add7-88b432f0b34a service nova] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Refreshing instance network info cache due to event network-changed-41bdc6f8-c059-49a5-86a4-a7a03cfe0300. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1917.761544] env[63379]: DEBUG oslo_concurrency.lockutils [req-3570f678-5636-441b-ad23-ba1de1602b3a req-6d3b01ac-c896-49f8-add7-88b432f0b34a service nova] Acquiring lock "refresh_cache-d4988643-18ff-44c8-8363-e0de43da2abe" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1917.761692] env[63379]: DEBUG oslo_concurrency.lockutils [req-3570f678-5636-441b-ad23-ba1de1602b3a req-6d3b01ac-c896-49f8-add7-88b432f0b34a service nova] Acquired lock "refresh_cache-d4988643-18ff-44c8-8363-e0de43da2abe" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1917.761860] env[63379]: DEBUG nova.network.neutron [req-3570f678-5636-441b-ad23-ba1de1602b3a req-6d3b01ac-c896-49f8-add7-88b432f0b34a service nova] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Refreshing network info cache for port 41bdc6f8-c059-49a5-86a4-a7a03cfe0300 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1917.914758] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780289, 'name': CreateVM_Task, 'duration_secs': 2.204635} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1917.914758] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1917.914758] env[63379]: DEBUG oslo_concurrency.lockutils [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1917.914758] env[63379]: DEBUG oslo_concurrency.lockutils [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1917.914758] env[63379]: DEBUG oslo_concurrency.lockutils [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1917.914758] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04f7a56e-978c-422e-a003-839c42f02878 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.920020] env[63379]: DEBUG oslo_vmware.api [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1917.920020] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e55095-9b4a-9c05-77c8-65bdd2fcfe05" [ 1917.920020] env[63379]: _type = "Task" [ 1917.920020] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1917.925204] env[63379]: DEBUG oslo_vmware.api [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e55095-9b4a-9c05-77c8-65bdd2fcfe05, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.159804] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-1db8ced8-f340-4257-846c-980b8dc57299 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1918.159804] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-453e1bbd-3236-4213-be2f-8fc833933b9a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.164637] env[63379]: DEBUG oslo_vmware.api [None req-1db8ced8-f340-4257-846c-980b8dc57299 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 1918.164637] env[63379]: value = "task-1780294" [ 1918.164637] env[63379]: _type = "Task" [ 1918.164637] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1918.172867] env[63379]: DEBUG oslo_vmware.api [None req-1db8ced8-f340-4257-846c-980b8dc57299 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780294, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.208980] env[63379]: DEBUG nova.network.neutron [-] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1918.427882] env[63379]: DEBUG oslo_vmware.api [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e55095-9b4a-9c05-77c8-65bdd2fcfe05, 'name': SearchDatastore_Task, 'duration_secs': 0.008814} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1918.428286] env[63379]: DEBUG oslo_concurrency.lockutils [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1918.428449] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1918.428711] env[63379]: DEBUG oslo_concurrency.lockutils [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1918.428880] env[63379]: DEBUG oslo_concurrency.lockutils [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1918.429069] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1918.429330] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-95397929-aad2-4d12-91e1-a131915990f5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.442179] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1918.442381] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1918.443191] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf01ebd0-4a63-4e68-a0d4-7e6dc0708d27 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.448395] env[63379]: DEBUG oslo_vmware.api [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1918.448395] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5232b85d-9ef7-9431-bbf5-2fcd38600377" [ 1918.448395] env[63379]: _type = "Task" [ 1918.448395] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1918.456737] env[63379]: DEBUG oslo_vmware.api [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5232b85d-9ef7-9431-bbf5-2fcd38600377, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.472430] env[63379]: DEBUG nova.network.neutron [req-3570f678-5636-441b-ad23-ba1de1602b3a req-6d3b01ac-c896-49f8-add7-88b432f0b34a service nova] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Updated VIF entry in instance network info cache for port 41bdc6f8-c059-49a5-86a4-a7a03cfe0300. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1918.472779] env[63379]: DEBUG nova.network.neutron [req-3570f678-5636-441b-ad23-ba1de1602b3a req-6d3b01ac-c896-49f8-add7-88b432f0b34a service nova] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Updating instance_info_cache with network_info: [{"id": "41bdc6f8-c059-49a5-86a4-a7a03cfe0300", "address": "fa:16:3e:25:56:5d", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41bdc6f8-c0", "ovs_interfaceid": "41bdc6f8-c059-49a5-86a4-a7a03cfe0300", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1918.674611] env[63379]: DEBUG oslo_vmware.api [None req-1db8ced8-f340-4257-846c-980b8dc57299 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780294, 'name': PowerOnVM_Task, 'duration_secs': 0.397617} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1918.674890] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-1db8ced8-f340-4257-846c-980b8dc57299 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1918.675107] env[63379]: DEBUG nova.compute.manager [None req-1db8ced8-f340-4257-846c-980b8dc57299 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1918.675987] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-554a3d8a-a96e-4837-8f17-bb326a775458 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.711989] env[63379]: INFO nova.compute.manager [-] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Took 1.02 seconds to deallocate network for instance. [ 1918.790112] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-172f51cd-0fa5-478e-a0a4-24498417f1ac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Volume attach. Driver type: vmdk {{(pid=63379) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1918.790112] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-172f51cd-0fa5-478e-a0a4-24498417f1ac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369501', 'volume_id': 'dec1aeed-ba0d-4ad4-9dfc-ec071c6b051c', 'name': 'volume-dec1aeed-ba0d-4ad4-9dfc-ec071c6b051c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd3c05ba6-b565-4432-b815-14ae0933853e', 'attached_at': '', 'detached_at': '', 'volume_id': 'dec1aeed-ba0d-4ad4-9dfc-ec071c6b051c', 'serial': 'dec1aeed-ba0d-4ad4-9dfc-ec071c6b051c'} {{(pid=63379) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1918.791288] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dfb446e-67d1-4e81-8f7c-191a1dc3a164 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.808599] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf6d3726-b9ac-4394-ac63-3c467bc19c87 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.835562] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-172f51cd-0fa5-478e-a0a4-24498417f1ac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] volume-dec1aeed-ba0d-4ad4-9dfc-ec071c6b051c/volume-dec1aeed-ba0d-4ad4-9dfc-ec071c6b051c.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1918.835885] 
env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9a249d2b-d44e-4f2d-b799-af97f6e00387 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.856545] env[63379]: DEBUG oslo_vmware.api [None req-172f51cd-0fa5-478e-a0a4-24498417f1ac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Waiting for the task: (returnval){ [ 1918.856545] env[63379]: value = "task-1780295" [ 1918.856545] env[63379]: _type = "Task" [ 1918.856545] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1918.865073] env[63379]: DEBUG oslo_vmware.api [None req-172f51cd-0fa5-478e-a0a4-24498417f1ac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1780295, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.961029] env[63379]: DEBUG oslo_vmware.api [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5232b85d-9ef7-9431-bbf5-2fcd38600377, 'name': SearchDatastore_Task, 'duration_secs': 0.019505} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1918.961813] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1453b0a-a9a4-442f-9733-dfba27e71249 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.966844] env[63379]: DEBUG oslo_vmware.api [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1918.966844] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]521e0ba9-4b7e-bad8-6770-ea1e29d67be7" [ 1918.966844] env[63379]: _type = "Task" [ 1918.966844] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1918.974256] env[63379]: DEBUG oslo_vmware.api [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]521e0ba9-4b7e-bad8-6770-ea1e29d67be7, 'name': SearchDatastore_Task} progress is 0%. 
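Annotation: the _attach_volume_vmdk entry above logs the Cinder connection_info the driver works from before issuing the ReconfigVM_Task that attaches the disk "with type thin". A small illustrative helper that pulls out the fields the attach path cares about; the key names match the logged dict, but the helper itself is a sketch, not Nova's code:

def vmdk_attach_params(connection_info):
    data = connection_info['data']
    return {
        'backing_moref': data['volume'],     # e.g. 'vm-369501', the vCenter object backing the volume
        'volume_id': data['volume_id'],
        'vmdk_name': data['name'],           # 'volume-<volume_id>'
        'read_only': data['access_mode'] != 'rw',
        'disk_type': 'thin',                 # matches "with type thin" in the reconfigure line
    }

example = {
    'driver_volume_type': 'vmdk',
    'data': {'volume': 'vm-369501',
             'volume_id': 'dec1aeed-ba0d-4ad4-9dfc-ec071c6b051c',
             'name': 'volume-dec1aeed-ba0d-4ad4-9dfc-ec071c6b051c',
             'access_mode': 'rw'},
}
print(vmdk_attach_params(example))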
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.974712] env[63379]: DEBUG oslo_concurrency.lockutils [req-3570f678-5636-441b-ad23-ba1de1602b3a req-6d3b01ac-c896-49f8-add7-88b432f0b34a service nova] Releasing lock "refresh_cache-d4988643-18ff-44c8-8363-e0de43da2abe" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1918.974950] env[63379]: DEBUG nova.compute.manager [req-3570f678-5636-441b-ad23-ba1de1602b3a req-6d3b01ac-c896-49f8-add7-88b432f0b34a service nova] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Received event network-changed-41bdc6f8-c059-49a5-86a4-a7a03cfe0300 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1918.975139] env[63379]: DEBUG nova.compute.manager [req-3570f678-5636-441b-ad23-ba1de1602b3a req-6d3b01ac-c896-49f8-add7-88b432f0b34a service nova] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Refreshing instance network info cache due to event network-changed-41bdc6f8-c059-49a5-86a4-a7a03cfe0300. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1918.975346] env[63379]: DEBUG oslo_concurrency.lockutils [req-3570f678-5636-441b-ad23-ba1de1602b3a req-6d3b01ac-c896-49f8-add7-88b432f0b34a service nova] Acquiring lock "refresh_cache-d4988643-18ff-44c8-8363-e0de43da2abe" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1918.975510] env[63379]: DEBUG oslo_concurrency.lockutils [req-3570f678-5636-441b-ad23-ba1de1602b3a req-6d3b01ac-c896-49f8-add7-88b432f0b34a service nova] Acquired lock "refresh_cache-d4988643-18ff-44c8-8363-e0de43da2abe" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1918.975710] env[63379]: DEBUG nova.network.neutron [req-3570f678-5636-441b-ad23-ba1de1602b3a req-6d3b01ac-c896-49f8-add7-88b432f0b34a service nova] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Refreshing network info cache for port 41bdc6f8-c059-49a5-86a4-a7a03cfe0300 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1919.218446] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9486a7b5-7f0c-4d48-9ad7-8f8e810d0b99 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1919.218818] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9486a7b5-7f0c-4d48-9ad7-8f8e810d0b99 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1919.219309] env[63379]: DEBUG nova.objects.instance [None req-9486a7b5-7f0c-4d48-9ad7-8f8e810d0b99 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Lazy-loading 'resources' on Instance uuid 14adcb7b-b754-407e-9a99-28a1ca2ede68 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1919.353341] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fd96a863-c0f7-4359-8f13-c4a73258a3a6 tempest-ServerActionsTestOtherB-1503948534 
tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "4b419aa8-d4da-45fd-a6da-6f05ee851f2f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1919.353975] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fd96a863-c0f7-4359-8f13-c4a73258a3a6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "4b419aa8-d4da-45fd-a6da-6f05ee851f2f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1919.353975] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fd96a863-c0f7-4359-8f13-c4a73258a3a6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "4b419aa8-d4da-45fd-a6da-6f05ee851f2f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1919.354178] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fd96a863-c0f7-4359-8f13-c4a73258a3a6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "4b419aa8-d4da-45fd-a6da-6f05ee851f2f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1919.354235] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fd96a863-c0f7-4359-8f13-c4a73258a3a6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "4b419aa8-d4da-45fd-a6da-6f05ee851f2f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1919.356420] env[63379]: INFO nova.compute.manager [None req-fd96a863-c0f7-4359-8f13-c4a73258a3a6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Terminating instance [ 1919.361329] env[63379]: DEBUG nova.compute.manager [None req-fd96a863-c0f7-4359-8f13-c4a73258a3a6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Start destroying the instance on the hypervisor. 
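Annotation: the Acquiring / "acquired ... waited 0.000s" / "released ... held 0.000s" triplets above are oslo.concurrency's bookkeeping around named locks such as "4b419aa8-...-events". A stdlib sketch that reproduces the same wait/held accounting (illustrative only, not the real lockutils implementation):

import threading
import time
from contextlib import contextmanager

_locks = {}

@contextmanager
def timed_lock(name):
    lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()                                  # "Acquiring lock ..."
    waited = time.monotonic() - t0
    print(f'Lock "{name}" acquired :: waited {waited:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - t1
        print(f'Lock "{name}" released :: held {held:.3f}s')

# usage: with timed_lock("4b419aa8-d4da-45fd-a6da-6f05ee851f2f-events"): ...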
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1919.361553] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-fd96a863-c0f7-4359-8f13-c4a73258a3a6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1919.362576] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c998c3c-40c7-49be-aca5-7b698587f629 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.371911] env[63379]: DEBUG oslo_vmware.api [None req-172f51cd-0fa5-478e-a0a4-24498417f1ac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1780295, 'name': ReconfigVM_Task, 'duration_secs': 0.408377} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1919.373989] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-172f51cd-0fa5-478e-a0a4-24498417f1ac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Reconfigured VM instance instance-00000065 to attach disk [datastore1] volume-dec1aeed-ba0d-4ad4-9dfc-ec071c6b051c/volume-dec1aeed-ba0d-4ad4-9dfc-ec071c6b051c.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1919.378770] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd96a863-c0f7-4359-8f13-c4a73258a3a6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1919.379034] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-63a84413-f276-47e3-abc2-e996dfd00c0e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.388945] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d5c5e90c-39f1-4f4b-83ab-ea19ed76b74d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.395200] env[63379]: DEBUG oslo_vmware.api [None req-172f51cd-0fa5-478e-a0a4-24498417f1ac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Waiting for the task: (returnval){ [ 1919.395200] env[63379]: value = "task-1780297" [ 1919.395200] env[63379]: _type = "Task" [ 1919.395200] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1919.396377] env[63379]: DEBUG oslo_vmware.api [None req-fd96a863-c0f7-4359-8f13-c4a73258a3a6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 1919.396377] env[63379]: value = "task-1780296" [ 1919.396377] env[63379]: _type = "Task" [ 1919.396377] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1919.407169] env[63379]: DEBUG oslo_vmware.api [None req-172f51cd-0fa5-478e-a0a4-24498417f1ac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1780297, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.410141] env[63379]: DEBUG oslo_vmware.api [None req-fd96a863-c0f7-4359-8f13-c4a73258a3a6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780296, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.477288] env[63379]: DEBUG oslo_vmware.api [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]521e0ba9-4b7e-bad8-6770-ea1e29d67be7, 'name': SearchDatastore_Task, 'duration_secs': 0.008624} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1919.479511] env[63379]: DEBUG oslo_concurrency.lockutils [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1919.479787] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 1c983c16-6f86-4932-9698-7fb1428ca231/1c983c16-6f86-4932-9698-7fb1428ca231.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1919.480079] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-49006e36-953a-40b3-a296-73cc9cc384de {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.486820] env[63379]: DEBUG oslo_vmware.api [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1919.486820] env[63379]: value = "task-1780298" [ 1919.486820] env[63379]: _type = "Task" [ 1919.486820] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1919.494749] env[63379]: DEBUG oslo_vmware.api [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780298, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.707783] env[63379]: DEBUG nova.network.neutron [req-3570f678-5636-441b-ad23-ba1de1602b3a req-6d3b01ac-c896-49f8-add7-88b432f0b34a service nova] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Updated VIF entry in instance network info cache for port 41bdc6f8-c059-49a5-86a4-a7a03cfe0300. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1919.708393] env[63379]: DEBUG nova.network.neutron [req-3570f678-5636-441b-ad23-ba1de1602b3a req-6d3b01ac-c896-49f8-add7-88b432f0b34a service nova] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Updating instance_info_cache with network_info: [{"id": "41bdc6f8-c059-49a5-86a4-a7a03cfe0300", "address": "fa:16:3e:25:56:5d", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41bdc6f8-c0", "ovs_interfaceid": "41bdc6f8-c059-49a5-86a4-a7a03cfe0300", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1919.874240] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0398967e-6bf4-4139-9c54-3bd952e6a77c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.882713] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57273291-194f-4271-9e8f-ec445cf8a4b5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.923529] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6035b905-e111-4c64-a72d-808eff9cdac3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.932370] env[63379]: DEBUG oslo_vmware.api [None req-172f51cd-0fa5-478e-a0a4-24498417f1ac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1780297, 'name': ReconfigVM_Task, 'duration_secs': 0.170196} completed successfully. 
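Annotation: the instance_info_cache updates above carry the full VIF model as JSON. A tiny illustrative walker over one such entry; the keys follow the logged structure, and for port 41bdc6f8-c059-49a5-86a4-a7a03cfe0300 it would return the fixed address 192.168.128.14 and no floating IPs:

def addresses_from_vif(vif):
    """Collect MAC, fixed and floating IPs from one network_info entry (sketch)."""
    fixed, floating = [], []
    for subnet in vif['network']['subnets']:
        for ip in subnet['ips']:
            fixed.append(ip['address'])
            floating.extend(f['address'] for f in ip.get('floating_ips', []))
    return {'mac': vif['address'], 'fixed': fixed, 'floating': floating}

# For the VIF logged above this yields:
# {'mac': 'fa:16:3e:25:56:5d', 'fixed': ['192.168.128.14'], 'floating': []}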
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1919.938388] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-172f51cd-0fa5-478e-a0a4-24498417f1ac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369501', 'volume_id': 'dec1aeed-ba0d-4ad4-9dfc-ec071c6b051c', 'name': 'volume-dec1aeed-ba0d-4ad4-9dfc-ec071c6b051c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd3c05ba6-b565-4432-b815-14ae0933853e', 'attached_at': '', 'detached_at': '', 'volume_id': 'dec1aeed-ba0d-4ad4-9dfc-ec071c6b051c', 'serial': 'dec1aeed-ba0d-4ad4-9dfc-ec071c6b051c'} {{(pid=63379) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1919.941018] env[63379]: DEBUG oslo_vmware.api [None req-fd96a863-c0f7-4359-8f13-c4a73258a3a6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780296, 'name': PowerOffVM_Task, 'duration_secs': 0.180234} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1919.942745] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7e86ed4-b93e-42f6-a91a-0344f853405b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.947573] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd96a863-c0f7-4359-8f13-c4a73258a3a6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1919.947906] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-fd96a863-c0f7-4359-8f13-c4a73258a3a6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1919.948307] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-84d5a013-5b89-4966-b2a3-bdcf72b90f1d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.961689] env[63379]: DEBUG nova.compute.provider_tree [None req-9486a7b5-7f0c-4d48-9ad7-8f8e810d0b99 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1919.997105] env[63379]: DEBUG oslo_vmware.api [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780298, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.460936} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1919.997374] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 1c983c16-6f86-4932-9698-7fb1428ca231/1c983c16-6f86-4932-9698-7fb1428ca231.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1919.997593] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1919.997986] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-75ae64c7-9945-4d2e-9435-239a5008de8f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.004741] env[63379]: DEBUG oslo_vmware.api [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1920.004741] env[63379]: value = "task-1780300" [ 1920.004741] env[63379]: _type = "Task" [ 1920.004741] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1920.012346] env[63379]: DEBUG oslo_vmware.api [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780300, 'name': ExtendVirtualDisk_Task} progress is 0%. 
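Annotation: the "Extending root virtual disk to 1048576" line above gives the target capacity in KiB (the vSphere extend-disk call takes a newCapacityKb value), which is what a 1 GiB flavor root disk works out to:

def root_gb_to_kib(root_gb: int) -> int:
    return root_gb * 1024 * 1024      # GiB -> KiB

assert root_gb_to_kib(1) == 1048576   # matches the logged extend target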
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1920.064571] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-fd96a863-c0f7-4359-8f13-c4a73258a3a6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1920.064733] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-fd96a863-c0f7-4359-8f13-c4a73258a3a6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1920.064788] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd96a863-c0f7-4359-8f13-c4a73258a3a6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Deleting the datastore file [datastore1] 4b419aa8-d4da-45fd-a6da-6f05ee851f2f {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1920.065108] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b7e987d4-2267-4ce7-bc8c-490dee9fadc3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.072295] env[63379]: DEBUG oslo_vmware.api [None req-fd96a863-c0f7-4359-8f13-c4a73258a3a6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 1920.072295] env[63379]: value = "task-1780301" [ 1920.072295] env[63379]: _type = "Task" [ 1920.072295] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1920.083930] env[63379]: DEBUG oslo_vmware.api [None req-fd96a863-c0f7-4359-8f13-c4a73258a3a6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780301, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1920.212349] env[63379]: DEBUG oslo_concurrency.lockutils [req-3570f678-5636-441b-ad23-ba1de1602b3a req-6d3b01ac-c896-49f8-add7-88b432f0b34a service nova] Releasing lock "refresh_cache-d4988643-18ff-44c8-8363-e0de43da2abe" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1920.212644] env[63379]: DEBUG nova.compute.manager [req-3570f678-5636-441b-ad23-ba1de1602b3a req-6d3b01ac-c896-49f8-add7-88b432f0b34a service nova] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Received event network-changed-4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1920.212826] env[63379]: DEBUG nova.compute.manager [req-3570f678-5636-441b-ad23-ba1de1602b3a req-6d3b01ac-c896-49f8-add7-88b432f0b34a service nova] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Refreshing instance network info cache due to event network-changed-4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1920.213075] env[63379]: DEBUG oslo_concurrency.lockutils [req-3570f678-5636-441b-ad23-ba1de1602b3a req-6d3b01ac-c896-49f8-add7-88b432f0b34a service nova] Acquiring lock "refresh_cache-48c17c3b-1197-46cb-a0f7-3671b2d82c7e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1920.213230] env[63379]: DEBUG oslo_concurrency.lockutils [req-3570f678-5636-441b-ad23-ba1de1602b3a req-6d3b01ac-c896-49f8-add7-88b432f0b34a service nova] Acquired lock "refresh_cache-48c17c3b-1197-46cb-a0f7-3671b2d82c7e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1920.213402] env[63379]: DEBUG nova.network.neutron [req-3570f678-5636-441b-ad23-ba1de1602b3a req-6d3b01ac-c896-49f8-add7-88b432f0b34a service nova] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Refreshing network info cache for port 4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1920.464909] env[63379]: DEBUG nova.scheduler.client.report [None req-9486a7b5-7f0c-4d48-9ad7-8f8e810d0b99 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1920.514333] env[63379]: DEBUG oslo_vmware.api [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780300, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070492} completed successfully. 
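Annotation: the inventory dict in the scheduler report-client line above is what gets compared against placement's stored records; per resource class the schedulable capacity falls out as (total - reserved) * allocation_ratio. A quick check with the logged numbers (sketch, not the report client itself):

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

def usable(inv):
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inv.items()}

print(usable(inventory))   # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}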
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1920.514712] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1920.515664] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-208b2c48-d01c-4acc-b081-9607627f17a1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.537479] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] 1c983c16-6f86-4932-9698-7fb1428ca231/1c983c16-6f86-4932-9698-7fb1428ca231.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1920.537796] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8f951e96-1fbc-46f9-af18-57321711d67c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.557455] env[63379]: DEBUG oslo_vmware.api [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1920.557455] env[63379]: value = "task-1780302" [ 1920.557455] env[63379]: _type = "Task" [ 1920.557455] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1920.566694] env[63379]: DEBUG oslo_vmware.api [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780302, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1920.580782] env[63379]: DEBUG oslo_vmware.api [None req-fd96a863-c0f7-4359-8f13-c4a73258a3a6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780301, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1920.946447] env[63379]: DEBUG nova.network.neutron [req-3570f678-5636-441b-ad23-ba1de1602b3a req-6d3b01ac-c896-49f8-add7-88b432f0b34a service nova] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Updated VIF entry in instance network info cache for port 4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1920.947018] env[63379]: DEBUG nova.network.neutron [req-3570f678-5636-441b-ad23-ba1de1602b3a req-6d3b01ac-c896-49f8-add7-88b432f0b34a service nova] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Updating instance_info_cache with network_info: [{"id": "4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191", "address": "fa:16:3e:c6:a7:ff", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.211", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b8a4c15-27", "ovs_interfaceid": "4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1920.970449] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9486a7b5-7f0c-4d48-9ad7-8f8e810d0b99 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.752s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1920.974968] env[63379]: DEBUG nova.objects.instance [None req-172f51cd-0fa5-478e-a0a4-24498417f1ac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lazy-loading 'flavor' on Instance uuid d3c05ba6-b565-4432-b815-14ae0933853e {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1920.997702] env[63379]: INFO nova.scheduler.client.report [None req-9486a7b5-7f0c-4d48-9ad7-8f8e810d0b99 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Deleted allocations for instance 14adcb7b-b754-407e-9a99-28a1ca2ede68 [ 1921.067201] env[63379]: DEBUG oslo_vmware.api [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780302, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1921.081609] env[63379]: DEBUG oslo_vmware.api [None req-fd96a863-c0f7-4359-8f13-c4a73258a3a6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780301, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.953828} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1921.083023] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd96a863-c0f7-4359-8f13-c4a73258a3a6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1921.083023] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-fd96a863-c0f7-4359-8f13-c4a73258a3a6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1921.083023] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-fd96a863-c0f7-4359-8f13-c4a73258a3a6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1921.083023] env[63379]: INFO nova.compute.manager [None req-fd96a863-c0f7-4359-8f13-c4a73258a3a6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Took 1.72 seconds to destroy the instance on the hypervisor. [ 1921.083023] env[63379]: DEBUG oslo.service.loopingcall [None req-fd96a863-c0f7-4359-8f13-c4a73258a3a6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1921.083023] env[63379]: DEBUG nova.compute.manager [-] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1921.083287] env[63379]: DEBUG nova.network.neutron [-] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1921.451210] env[63379]: DEBUG oslo_concurrency.lockutils [req-3570f678-5636-441b-ad23-ba1de1602b3a req-6d3b01ac-c896-49f8-add7-88b432f0b34a service nova] Releasing lock "refresh_cache-48c17c3b-1197-46cb-a0f7-3671b2d82c7e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1921.483409] env[63379]: DEBUG oslo_concurrency.lockutils [None req-172f51cd-0fa5-478e-a0a4-24498417f1ac tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lock "d3c05ba6-b565-4432-b815-14ae0933853e" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.289s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1921.506060] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9486a7b5-7f0c-4d48-9ad7-8f8e810d0b99 tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Lock "14adcb7b-b754-407e-9a99-28a1ca2ede68" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.983s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1921.558796] env[63379]: DEBUG nova.compute.manager [req-c5ee94d6-8dba-4131-9f2e-35063f12fba2 req-a41c1ffd-b776-49ae-bdd1-8c29492f6432 service nova] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Received event network-vif-deleted-d4e8381c-6eb1-4ebe-a6a3-b89ee2eb423e {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1921.559082] env[63379]: INFO nova.compute.manager [req-c5ee94d6-8dba-4131-9f2e-35063f12fba2 req-a41c1ffd-b776-49ae-bdd1-8c29492f6432 service nova] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Neutron deleted interface d4e8381c-6eb1-4ebe-a6a3-b89ee2eb423e; detaching it from the instance and deleting it from the info cache [ 1921.559236] env[63379]: DEBUG nova.network.neutron [req-c5ee94d6-8dba-4131-9f2e-35063f12fba2 req-a41c1ffd-b776-49ae-bdd1-8c29492f6432 service nova] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1921.569956] env[63379]: DEBUG oslo_vmware.api [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780302, 'name': ReconfigVM_Task, 'duration_secs': 0.620617} completed successfully. 
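Annotation: the entries from "Powered off the VM" through "Deallocating network for instance" trace the hypervisor-side destroy path: power off, unregister the VM, delete its datastore directory, then hand off to a looping call that retries network deallocation. A condensed stdlib sketch of that ordering; every callable and the back-off numbers are hypothetical stand-ins, not Nova's actual retry policy:

import time

def destroy_instance_sketch(power_off, unregister, delete_datastore_dir,
                            deallocate_network, retries=3, delay=1.0):
    power_off()                 # PowerOffVM_Task
    unregister()                # VirtualMachine.UnregisterVM
    delete_datastore_dir()      # FileManager.DeleteDatastoreFile_Task
    # "Waiting for function ..._deallocate_network_with_retries to return."
    for attempt in range(1, retries + 1):
        try:
            deallocate_network()
            return
        except Exception:
            if attempt == retries:
                raise
            time.sleep(delay * attempt)   # crude back-off, illustrative only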
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1921.570689] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Reconfigured VM instance instance-0000006b to attach disk [datastore1] 1c983c16-6f86-4932-9698-7fb1428ca231/1c983c16-6f86-4932-9698-7fb1428ca231.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1921.571318] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4060eb5d-7c72-4576-b1a7-1b4395b1521a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.578416] env[63379]: DEBUG oslo_vmware.api [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1921.578416] env[63379]: value = "task-1780303" [ 1921.578416] env[63379]: _type = "Task" [ 1921.578416] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1921.586377] env[63379]: DEBUG oslo_vmware.api [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780303, 'name': Rename_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1921.847864] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3c45dbb5-97eb-466f-8621-f99b8ca2bf7a tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Acquiring lock "d3c05ba6-b565-4432-b815-14ae0933853e" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1921.848160] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3c45dbb5-97eb-466f-8621-f99b8ca2bf7a tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lock "d3c05ba6-b565-4432-b815-14ae0933853e" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1922.041031] env[63379]: DEBUG nova.network.neutron [-] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1922.045883] env[63379]: DEBUG oslo_concurrency.lockutils [None req-dd5436ad-c4ff-4922-8055-189918c23dff tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Acquiring lock "b3f753e3-2ec6-4359-8de0-f9c771e274e5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1922.046188] env[63379]: DEBUG oslo_concurrency.lockutils [None req-dd5436ad-c4ff-4922-8055-189918c23dff tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Lock "b3f753e3-2ec6-4359-8de0-f9c771e274e5" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1922.046407] env[63379]: DEBUG oslo_concurrency.lockutils [None req-dd5436ad-c4ff-4922-8055-189918c23dff tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Acquiring lock "b3f753e3-2ec6-4359-8de0-f9c771e274e5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1922.046600] env[63379]: DEBUG oslo_concurrency.lockutils [None req-dd5436ad-c4ff-4922-8055-189918c23dff tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Lock "b3f753e3-2ec6-4359-8de0-f9c771e274e5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1922.046826] env[63379]: DEBUG oslo_concurrency.lockutils [None req-dd5436ad-c4ff-4922-8055-189918c23dff tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Lock "b3f753e3-2ec6-4359-8de0-f9c771e274e5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1922.049878] env[63379]: INFO nova.compute.manager [None req-dd5436ad-c4ff-4922-8055-189918c23dff tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: b3f753e3-2ec6-4359-8de0-f9c771e274e5] Terminating instance [ 1922.055145] env[63379]: DEBUG oslo_concurrency.lockutils [None req-dd5436ad-c4ff-4922-8055-189918c23dff tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Acquiring lock "refresh_cache-b3f753e3-2ec6-4359-8de0-f9c771e274e5" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1922.055316] env[63379]: DEBUG oslo_concurrency.lockutils [None req-dd5436ad-c4ff-4922-8055-189918c23dff tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Acquired lock "refresh_cache-b3f753e3-2ec6-4359-8de0-f9c771e274e5" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1922.055511] env[63379]: DEBUG nova.network.neutron [None req-dd5436ad-c4ff-4922-8055-189918c23dff tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: b3f753e3-2ec6-4359-8de0-f9c771e274e5] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1922.061974] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b53d2b01-b319-4949-aae7-d04ad847e96e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.072163] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ac3a2b5-bcc2-4ad2-acee-d00560c1de60 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.098629] env[63379]: DEBUG oslo_vmware.api [None 
req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780303, 'name': Rename_Task, 'duration_secs': 0.148097} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1922.098961] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1922.099286] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b6637f58-be6e-4bdd-bb24-f65c789752bc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.111018] env[63379]: DEBUG nova.compute.manager [req-c5ee94d6-8dba-4131-9f2e-35063f12fba2 req-a41c1ffd-b776-49ae-bdd1-8c29492f6432 service nova] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Detach interface failed, port_id=d4e8381c-6eb1-4ebe-a6a3-b89ee2eb423e, reason: Instance 4b419aa8-d4da-45fd-a6da-6f05ee851f2f could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 1922.112610] env[63379]: DEBUG oslo_vmware.api [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1922.112610] env[63379]: value = "task-1780304" [ 1922.112610] env[63379]: _type = "Task" [ 1922.112610] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1922.121118] env[63379]: DEBUG oslo_vmware.api [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780304, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1922.351047] env[63379]: INFO nova.compute.manager [None req-3c45dbb5-97eb-466f-8621-f99b8ca2bf7a tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Detaching volume 5c8655ca-742a-4de6-850a-911164a51f15 [ 1922.383875] env[63379]: INFO nova.virt.block_device [None req-3c45dbb5-97eb-466f-8621-f99b8ca2bf7a tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Attempting to driver detach volume 5c8655ca-742a-4de6-850a-911164a51f15 from mountpoint /dev/sdb [ 1922.384178] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c45dbb5-97eb-466f-8621-f99b8ca2bf7a tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Volume detach. 
Driver type: vmdk {{(pid=63379) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1922.384603] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c45dbb5-97eb-466f-8621-f99b8ca2bf7a tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369498', 'volume_id': '5c8655ca-742a-4de6-850a-911164a51f15', 'name': 'volume-5c8655ca-742a-4de6-850a-911164a51f15', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd3c05ba6-b565-4432-b815-14ae0933853e', 'attached_at': '', 'detached_at': '', 'volume_id': '5c8655ca-742a-4de6-850a-911164a51f15', 'serial': '5c8655ca-742a-4de6-850a-911164a51f15'} {{(pid=63379) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1922.385319] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb9a47e2-193f-425a-9965-e56e19738fe3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.409972] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b78fbd8-88d4-4349-9cd6-0e50b64e0798 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.417588] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fee45ba2-74eb-4320-8c0a-4b86e7432284 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.442077] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6c3feb2-a972-4516-89b3-488ad5060381 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.458503] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c45dbb5-97eb-466f-8621-f99b8ca2bf7a tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] The volume has not been displaced from its original location: [datastore1] volume-5c8655ca-742a-4de6-850a-911164a51f15/volume-5c8655ca-742a-4de6-850a-911164a51f15.vmdk. No consolidation needed. 
{{(pid=63379) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1922.463788] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c45dbb5-97eb-466f-8621-f99b8ca2bf7a tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Reconfiguring VM instance instance-00000065 to detach disk 2001 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1922.464118] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-acdd6ca1-9817-4147-9808-25d11e79b6ee {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.482063] env[63379]: DEBUG oslo_vmware.api [None req-3c45dbb5-97eb-466f-8621-f99b8ca2bf7a tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Waiting for the task: (returnval){ [ 1922.482063] env[63379]: value = "task-1780305" [ 1922.482063] env[63379]: _type = "Task" [ 1922.482063] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1922.489740] env[63379]: DEBUG oslo_vmware.api [None req-3c45dbb5-97eb-466f-8621-f99b8ca2bf7a tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1780305, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1922.543660] env[63379]: INFO nova.compute.manager [-] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Took 1.46 seconds to deallocate network for instance. [ 1922.574722] env[63379]: DEBUG nova.network.neutron [None req-dd5436ad-c4ff-4922-8055-189918c23dff tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: b3f753e3-2ec6-4359-8de0-f9c771e274e5] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1922.622740] env[63379]: DEBUG oslo_vmware.api [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780304, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1922.629598] env[63379]: DEBUG nova.network.neutron [None req-dd5436ad-c4ff-4922-8055-189918c23dff tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: b3f753e3-2ec6-4359-8de0-f9c771e274e5] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1922.642534] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0552d305-33ff-4183-a037-0d6fbe75703d tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "2be6bdea-416e-4912-8930-3c4e4f194f99" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1922.642849] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0552d305-33ff-4183-a037-0d6fbe75703d tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "2be6bdea-416e-4912-8930-3c4e4f194f99" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1922.992429] env[63379]: DEBUG oslo_vmware.api [None req-3c45dbb5-97eb-466f-8621-f99b8ca2bf7a tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1780305, 'name': ReconfigVM_Task, 'duration_secs': 0.488235} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1922.992749] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c45dbb5-97eb-466f-8621-f99b8ca2bf7a tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Reconfigured VM instance instance-00000065 to detach disk 2001 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1922.997738] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-899fb305-00d7-4009-8356-4878962c296f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.012770] env[63379]: DEBUG oslo_vmware.api [None req-3c45dbb5-97eb-466f-8621-f99b8ca2bf7a tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Waiting for the task: (returnval){ [ 1923.012770] env[63379]: value = "task-1780306" [ 1923.012770] env[63379]: _type = "Task" [ 1923.012770] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1923.021808] env[63379]: DEBUG oslo_vmware.api [None req-3c45dbb5-97eb-466f-8621-f99b8ca2bf7a tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1780306, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1923.050246] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fd96a863-c0f7-4359-8f13-c4a73258a3a6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1923.050525] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fd96a863-c0f7-4359-8f13-c4a73258a3a6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1923.050747] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fd96a863-c0f7-4359-8f13-c4a73258a3a6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1923.074521] env[63379]: INFO nova.scheduler.client.report [None req-fd96a863-c0f7-4359-8f13-c4a73258a3a6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Deleted allocations for instance 4b419aa8-d4da-45fd-a6da-6f05ee851f2f [ 1923.123502] env[63379]: DEBUG oslo_vmware.api [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780304, 'name': PowerOnVM_Task, 'duration_secs': 0.80891} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1923.124494] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1923.124726] env[63379]: INFO nova.compute.manager [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Took 10.48 seconds to spawn the instance on the hypervisor. 
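The PowerOnVM_Task lines just above show oslo.vmware's task poller at work: the caller waits on the vCenter task while the poller keeps reading its progress (the "progress is N%" DEBUG lines from api.py:434) until the task reports success or error, then logs completion together with its duration_secs (api.py:444). The following is only a simplified stand-in for that loop, not the real oslo.vmware code; get_task_info is a hypothetical callable standing in for a vSphere TaskInfo read:

    import time

    def wait_for_task(get_task_info, poll_interval=0.5):
        # get_task_info() is assumed to return an object with .state
        # ('running', 'success' or 'error') and .progress (0-100),
        # roughly the shape of a vSphere TaskInfo.
        start = time.time()
        while True:
            info = get_task_info()
            if info.state == 'success':
                # The "... completed successfully" line; the elapsed time
                # is what shows up as 'duration_secs' in the log.
                return time.time() - start
            if info.state == 'error':
                raise RuntimeError('vCenter task failed')
            # The "... progress is N%" lines while the task is still running.
            print('progress is %d%%' % info.progress)
            time.sleep(poll_interval)

In oslo.vmware itself the poll is driven by a looping call rather than a bare sleep loop, but the observable result is the same sequence of progress and completion log lines seen here.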
[ 1923.124929] env[63379]: DEBUG nova.compute.manager [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1923.125809] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-468b88eb-d7e5-4672-aec8-2a2e8c8ad481 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.132384] env[63379]: DEBUG oslo_concurrency.lockutils [None req-dd5436ad-c4ff-4922-8055-189918c23dff tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Releasing lock "refresh_cache-b3f753e3-2ec6-4359-8de0-f9c771e274e5" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1923.132762] env[63379]: DEBUG nova.compute.manager [None req-dd5436ad-c4ff-4922-8055-189918c23dff tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: b3f753e3-2ec6-4359-8de0-f9c771e274e5] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1923.132955] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-dd5436ad-c4ff-4922-8055-189918c23dff tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: b3f753e3-2ec6-4359-8de0-f9c771e274e5] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1923.136337] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c3fd5a2-a97b-4593-aeec-d8b21eadeb7a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.143473] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd5436ad-c4ff-4922-8055-189918c23dff tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: b3f753e3-2ec6-4359-8de0-f9c771e274e5] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1923.143729] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2f44ccb0-698a-41fa-9c62-c519cd4921fa {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.145549] env[63379]: INFO nova.compute.manager [None req-0552d305-33ff-4183-a037-0d6fbe75703d tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Detaching volume 6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2 [ 1923.154550] env[63379]: DEBUG oslo_vmware.api [None req-dd5436ad-c4ff-4922-8055-189918c23dff tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Waiting for the task: (returnval){ [ 1923.154550] env[63379]: value = "task-1780307" [ 1923.154550] env[63379]: _type = "Task" [ 1923.154550] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1923.162971] env[63379]: DEBUG oslo_vmware.api [None req-dd5436ad-c4ff-4922-8055-189918c23dff tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780307, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1923.176610] env[63379]: INFO nova.virt.block_device [None req-0552d305-33ff-4183-a037-0d6fbe75703d tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Attempting to driver detach volume 6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2 from mountpoint /dev/sdb [ 1923.176904] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-0552d305-33ff-4183-a037-0d6fbe75703d tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Volume detach. Driver type: vmdk {{(pid=63379) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1923.177160] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-0552d305-33ff-4183-a037-0d6fbe75703d tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369488', 'volume_id': '6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2', 'name': 'volume-6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2be6bdea-416e-4912-8930-3c4e4f194f99', 'attached_at': '', 'detached_at': '', 'volume_id': '6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2', 'serial': '6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2'} {{(pid=63379) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1923.178344] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-309f7187-4b42-472c-9294-2bbaead6e0ed {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.202735] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdb77f49-3fce-4379-a9e7-21e1a0441436 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.210011] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99fd74e8-cefe-4733-81e1-317c608c1186 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.230625] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a783ebb1-3f6b-4731-a161-282239dee8be {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.245271] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-0552d305-33ff-4183-a037-0d6fbe75703d tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] The volume has not been displaced from its original location: [datastore1] volume-6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2/volume-6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2.vmdk. No consolidation needed. 
{{(pid=63379) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1923.250464] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-0552d305-33ff-4183-a037-0d6fbe75703d tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Reconfiguring VM instance instance-00000061 to detach disk 2001 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1923.250747] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-13871772-98d1-4e6d-9a37-c51b32a85564 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.268796] env[63379]: DEBUG oslo_vmware.api [None req-0552d305-33ff-4183-a037-0d6fbe75703d tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1923.268796] env[63379]: value = "task-1780308" [ 1923.268796] env[63379]: _type = "Task" [ 1923.268796] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1923.276888] env[63379]: DEBUG oslo_vmware.api [None req-0552d305-33ff-4183-a037-0d6fbe75703d tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780308, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1923.524740] env[63379]: DEBUG oslo_vmware.api [None req-3c45dbb5-97eb-466f-8621-f99b8ca2bf7a tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1780306, 'name': ReconfigVM_Task, 'duration_secs': 0.147304} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1923.525424] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c45dbb5-97eb-466f-8621-f99b8ca2bf7a tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369498', 'volume_id': '5c8655ca-742a-4de6-850a-911164a51f15', 'name': 'volume-5c8655ca-742a-4de6-850a-911164a51f15', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd3c05ba6-b565-4432-b815-14ae0933853e', 'attached_at': '', 'detached_at': '', 'volume_id': '5c8655ca-742a-4de6-850a-911164a51f15', 'serial': '5c8655ca-742a-4de6-850a-911164a51f15'} {{(pid=63379) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1923.582298] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fd96a863-c0f7-4359-8f13-c4a73258a3a6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "4b419aa8-d4da-45fd-a6da-6f05ee851f2f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.229s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1923.647800] env[63379]: INFO nova.compute.manager [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Took 15.25 seconds to build instance. [ 1923.669111] env[63379]: DEBUG oslo_vmware.api [None req-dd5436ad-c4ff-4922-8055-189918c23dff tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780307, 'name': PowerOffVM_Task, 'duration_secs': 0.229901} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1923.669495] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd5436ad-c4ff-4922-8055-189918c23dff tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: b3f753e3-2ec6-4359-8de0-f9c771e274e5] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1923.669742] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-dd5436ad-c4ff-4922-8055-189918c23dff tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: b3f753e3-2ec6-4359-8de0-f9c771e274e5] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1923.670061] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c3e07f0a-d72d-41a0-a7db-dc88fd370b78 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.694774] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-dd5436ad-c4ff-4922-8055-189918c23dff tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: b3f753e3-2ec6-4359-8de0-f9c771e274e5] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1923.694886] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-dd5436ad-c4ff-4922-8055-189918c23dff tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: b3f753e3-2ec6-4359-8de0-f9c771e274e5] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1923.695045] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd5436ad-c4ff-4922-8055-189918c23dff tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Deleting the datastore file [datastore1] b3f753e3-2ec6-4359-8de0-f9c771e274e5 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1923.695309] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8f8a0daa-3b9f-4b89-926f-e1dd603c2281 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.702294] env[63379]: DEBUG oslo_vmware.api [None req-dd5436ad-c4ff-4922-8055-189918c23dff tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Waiting for the task: (returnval){ [ 1923.702294] env[63379]: value = "task-1780310" [ 1923.702294] env[63379]: _type = "Task" [ 1923.702294] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1923.710773] env[63379]: DEBUG oslo_vmware.api [None req-dd5436ad-c4ff-4922-8055-189918c23dff tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780310, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1923.779000] env[63379]: DEBUG oslo_vmware.api [None req-0552d305-33ff-4183-a037-0d6fbe75703d tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780308, 'name': ReconfigVM_Task, 'duration_secs': 0.226354} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1923.779478] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-0552d305-33ff-4183-a037-0d6fbe75703d tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Reconfigured VM instance instance-00000061 to detach disk 2001 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1923.786907] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f571918a-38e9-405a-942b-38475e5a895e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.803481] env[63379]: DEBUG oslo_vmware.api [None req-0552d305-33ff-4183-a037-0d6fbe75703d tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1923.803481] env[63379]: value = "task-1780311" [ 1923.803481] env[63379]: _type = "Task" [ 1923.803481] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1923.811765] env[63379]: DEBUG oslo_vmware.api [None req-0552d305-33ff-4183-a037-0d6fbe75703d tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780311, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1924.001162] env[63379]: DEBUG nova.compute.manager [req-f11ea78a-b2c4-4781-90e1-c734549d4dc8 req-9c8b9cc3-51ef-4ec0-a99d-beb072a8e10c service nova] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Received event network-changed-62d6fce2-bf52-422e-8166-344c4fd61274 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1924.001382] env[63379]: DEBUG nova.compute.manager [req-f11ea78a-b2c4-4781-90e1-c734549d4dc8 req-9c8b9cc3-51ef-4ec0-a99d-beb072a8e10c service nova] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Refreshing instance network info cache due to event network-changed-62d6fce2-bf52-422e-8166-344c4fd61274. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1924.001602] env[63379]: DEBUG oslo_concurrency.lockutils [req-f11ea78a-b2c4-4781-90e1-c734549d4dc8 req-9c8b9cc3-51ef-4ec0-a99d-beb072a8e10c service nova] Acquiring lock "refresh_cache-1c983c16-6f86-4932-9698-7fb1428ca231" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1924.001777] env[63379]: DEBUG oslo_concurrency.lockutils [req-f11ea78a-b2c4-4781-90e1-c734549d4dc8 req-9c8b9cc3-51ef-4ec0-a99d-beb072a8e10c service nova] Acquired lock "refresh_cache-1c983c16-6f86-4932-9698-7fb1428ca231" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1924.001961] env[63379]: DEBUG nova.network.neutron [req-f11ea78a-b2c4-4781-90e1-c734549d4dc8 req-9c8b9cc3-51ef-4ec0-a99d-beb072a8e10c service nova] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Refreshing network info cache for port 62d6fce2-bf52-422e-8166-344c4fd61274 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1924.072933] env[63379]: DEBUG nova.objects.instance [None req-3c45dbb5-97eb-466f-8621-f99b8ca2bf7a tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lazy-loading 'flavor' on Instance uuid d3c05ba6-b565-4432-b815-14ae0933853e {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1924.148971] env[63379]: DEBUG oslo_concurrency.lockutils [None req-817ae6bf-4654-4f23-819c-4e068435f0d2 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "1c983c16-6f86-4932-9698-7fb1428ca231" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.757s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1924.212190] env[63379]: DEBUG oslo_vmware.api [None req-dd5436ad-c4ff-4922-8055-189918c23dff tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Task: {'id': task-1780310, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.280554} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1924.212417] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd5436ad-c4ff-4922-8055-189918c23dff tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1924.212628] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-dd5436ad-c4ff-4922-8055-189918c23dff tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: b3f753e3-2ec6-4359-8de0-f9c771e274e5] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1924.212829] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-dd5436ad-c4ff-4922-8055-189918c23dff tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: b3f753e3-2ec6-4359-8de0-f9c771e274e5] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1924.213017] env[63379]: INFO nova.compute.manager [None req-dd5436ad-c4ff-4922-8055-189918c23dff tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] [instance: b3f753e3-2ec6-4359-8de0-f9c771e274e5] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1924.213490] env[63379]: DEBUG oslo.service.loopingcall [None req-dd5436ad-c4ff-4922-8055-189918c23dff tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1924.213490] env[63379]: DEBUG nova.compute.manager [-] [instance: b3f753e3-2ec6-4359-8de0-f9c771e274e5] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1924.213641] env[63379]: DEBUG nova.network.neutron [-] [instance: b3f753e3-2ec6-4359-8de0-f9c771e274e5] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1924.232677] env[63379]: DEBUG nova.network.neutron [-] [instance: b3f753e3-2ec6-4359-8de0-f9c771e274e5] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1924.316648] env[63379]: DEBUG oslo_vmware.api [None req-0552d305-33ff-4183-a037-0d6fbe75703d tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780311, 'name': ReconfigVM_Task, 'duration_secs': 0.144179} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1924.316971] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-0552d305-33ff-4183-a037-0d6fbe75703d tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369488', 'volume_id': '6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2', 'name': 'volume-6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2be6bdea-416e-4912-8930-3c4e4f194f99', 'attached_at': '', 'detached_at': '', 'volume_id': '6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2', 'serial': '6705e7c7-36df-4f63-9dd7-b53e2f4ca7d2'} {{(pid=63379) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1924.323426] env[63379]: DEBUG oslo_concurrency.lockutils [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "df8d513d-c201-4ffe-894e-cf8c3318cecc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1924.323651] env[63379]: DEBUG oslo_concurrency.lockutils [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "df8d513d-c201-4ffe-894e-cf8c3318cecc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1924.712555] env[63379]: DEBUG nova.network.neutron [req-f11ea78a-b2c4-4781-90e1-c734549d4dc8 req-9c8b9cc3-51ef-4ec0-a99d-beb072a8e10c service nova] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Updated VIF entry in instance network info cache for port 62d6fce2-bf52-422e-8166-344c4fd61274. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1924.712932] env[63379]: DEBUG nova.network.neutron [req-f11ea78a-b2c4-4781-90e1-c734549d4dc8 req-9c8b9cc3-51ef-4ec0-a99d-beb072a8e10c service nova] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Updating instance_info_cache with network_info: [{"id": "62d6fce2-bf52-422e-8166-344c4fd61274", "address": "fa:16:3e:fe:3b:ec", "network": {"id": "c67e6fb1-ba3e-4494-b459-ecd555f3bf64", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1864563188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.212", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c01c5c8c3734c4ea066324e542e7374", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6934071-bf85-4591-9c7d-55c7ea131262", "external-id": "nsx-vlan-transportzone-452", "segmentation_id": 452, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62d6fce2-bf", "ovs_interfaceid": "62d6fce2-bf52-422e-8166-344c4fd61274", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1924.734911] env[63379]: DEBUG nova.network.neutron [-] [instance: b3f753e3-2ec6-4359-8de0-f9c771e274e5] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1924.825608] env[63379]: DEBUG nova.compute.manager [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Starting instance... 
{{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1924.859066] env[63379]: DEBUG nova.objects.instance [None req-0552d305-33ff-4183-a037-0d6fbe75703d tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lazy-loading 'flavor' on Instance uuid 2be6bdea-416e-4912-8930-3c4e4f194f99 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1925.080261] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3c45dbb5-97eb-466f-8621-f99b8ca2bf7a tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lock "d3c05ba6-b565-4432-b815-14ae0933853e" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.232s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1925.092760] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4d3fbf96-6bf4-4b62-8a6f-7d0faac3602c tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Acquiring lock "d3c05ba6-b565-4432-b815-14ae0933853e" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1925.093038] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4d3fbf96-6bf4-4b62-8a6f-7d0faac3602c tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lock "d3c05ba6-b565-4432-b815-14ae0933853e" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1925.215944] env[63379]: DEBUG oslo_concurrency.lockutils [req-f11ea78a-b2c4-4781-90e1-c734549d4dc8 req-9c8b9cc3-51ef-4ec0-a99d-beb072a8e10c service nova] Releasing lock "refresh_cache-1c983c16-6f86-4932-9698-7fb1428ca231" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1925.237074] env[63379]: INFO nova.compute.manager [-] [instance: b3f753e3-2ec6-4359-8de0-f9c771e274e5] Took 1.02 seconds to deallocate network for instance. 
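The per-instance "Acquiring lock ... / acquired ... waited Ns / released ... held Ns" lines that recur throughout this trace, for example around the two detach_volume requests on d3c05ba6-b565-4432-b815-14ae0933853e just above, are emitted by oslo.concurrency's lockutils wrapper, which Nova uses to serialize operations on a single instance UUID. A minimal sketch of that pattern using the public oslo_concurrency API; the lock name is taken from the instance UUID in the log, and the empty function body is a placeholder, not Nova's actual implementation:

    import logging

    from oslo_concurrency import lockutils

    # With DEBUG logging enabled, entering and leaving the decorated function
    # produces "Acquiring lock ...", "Lock ... acquired ... waited Xs" and
    # "Lock ... released ... held Ys" messages like the ones in this log.
    logging.basicConfig(level=logging.DEBUG)

    @lockutils.synchronized('d3c05ba6-b565-4432-b815-14ae0933853e')
    def do_detach_volume():
        pass  # driver detach + ReconfigVM_Task would run here in Nova

    do_detach_volume()

In the log, "waited" is how long the caller blocked before getting the lock and "held" is how long it kept it; here the second detach request (req-4d3fbf96...) only asked for the lock after the first (req-3c45dbb5...) had released it following 3.232s of work, which is why its own "waited" time is 0.000s.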
[ 1925.348860] env[63379]: DEBUG oslo_concurrency.lockutils [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1925.349234] env[63379]: DEBUG oslo_concurrency.lockutils [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1925.351029] env[63379]: INFO nova.compute.claims [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1925.596385] env[63379]: INFO nova.compute.manager [None req-4d3fbf96-6bf4-4b62-8a6f-7d0faac3602c tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Detaching volume dec1aeed-ba0d-4ad4-9dfc-ec071c6b051c [ 1925.633773] env[63379]: INFO nova.virt.block_device [None req-4d3fbf96-6bf4-4b62-8a6f-7d0faac3602c tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Attempting to driver detach volume dec1aeed-ba0d-4ad4-9dfc-ec071c6b051c from mountpoint /dev/sdc [ 1925.634059] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d3fbf96-6bf4-4b62-8a6f-7d0faac3602c tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Volume detach. 
Driver type: vmdk {{(pid=63379) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1925.634263] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d3fbf96-6bf4-4b62-8a6f-7d0faac3602c tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369501', 'volume_id': 'dec1aeed-ba0d-4ad4-9dfc-ec071c6b051c', 'name': 'volume-dec1aeed-ba0d-4ad4-9dfc-ec071c6b051c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd3c05ba6-b565-4432-b815-14ae0933853e', 'attached_at': '', 'detached_at': '', 'volume_id': 'dec1aeed-ba0d-4ad4-9dfc-ec071c6b051c', 'serial': 'dec1aeed-ba0d-4ad4-9dfc-ec071c6b051c'} {{(pid=63379) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1925.635191] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ffaec76-6004-4521-888b-800f10d6c1fd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.657741] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c15f548-8194-4fe3-b551-ff3135bcbb8a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.665097] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2621203a-c405-451e-bf20-f60223fe62c0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.685212] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a8a98ba-b197-4381-86cc-7aa1555ffde1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.700712] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d3fbf96-6bf4-4b62-8a6f-7d0faac3602c tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] The volume has not been displaced from its original location: [datastore1] volume-dec1aeed-ba0d-4ad4-9dfc-ec071c6b051c/volume-dec1aeed-ba0d-4ad4-9dfc-ec071c6b051c.vmdk. No consolidation needed. 
{{(pid=63379) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1925.706515] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d3fbf96-6bf4-4b62-8a6f-7d0faac3602c tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Reconfiguring VM instance instance-00000065 to detach disk 2002 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1925.706816] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a81c0904-f843-4853-b195-7871ca29613b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.725370] env[63379]: DEBUG oslo_vmware.api [None req-4d3fbf96-6bf4-4b62-8a6f-7d0faac3602c tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Waiting for the task: (returnval){ [ 1925.725370] env[63379]: value = "task-1780312" [ 1925.725370] env[63379]: _type = "Task" [ 1925.725370] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1925.733110] env[63379]: DEBUG oslo_vmware.api [None req-4d3fbf96-6bf4-4b62-8a6f-7d0faac3602c tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1780312, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1925.743164] env[63379]: DEBUG oslo_concurrency.lockutils [None req-dd5436ad-c4ff-4922-8055-189918c23dff tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1925.865024] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0552d305-33ff-4183-a037-0d6fbe75703d tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "2be6bdea-416e-4912-8930-3c4e4f194f99" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.221s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1926.235572] env[63379]: DEBUG oslo_vmware.api [None req-4d3fbf96-6bf4-4b62-8a6f-7d0faac3602c tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1780312, 'name': ReconfigVM_Task, 'duration_secs': 0.254409} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1926.235881] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d3fbf96-6bf4-4b62-8a6f-7d0faac3602c tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Reconfigured VM instance instance-00000065 to detach disk 2002 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1926.240655] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-98310531-ab3c-4e17-8858-aa8ef3ae2e2a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.256065] env[63379]: DEBUG oslo_vmware.api [None req-4d3fbf96-6bf4-4b62-8a6f-7d0faac3602c tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Waiting for the task: (returnval){ [ 1926.256065] env[63379]: value = "task-1780313" [ 1926.256065] env[63379]: _type = "Task" [ 1926.256065] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1926.266022] env[63379]: DEBUG oslo_vmware.api [None req-4d3fbf96-6bf4-4b62-8a6f-7d0faac3602c tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1780313, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.475780] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a33da8ad-5a5b-4c3d-9fec-5f21be57b859 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.483689] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e80de6e-0f1a-4efb-8c1d-6ee15223e18f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.514298] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-488b43fb-2c3d-4b3b-aeb8-7b9516e66b78 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.521706] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6261023-69a2-4097-a6ad-5dbf98717555 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.534840] env[63379]: DEBUG nova.compute.provider_tree [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1926.766235] env[63379]: DEBUG oslo_vmware.api [None req-4d3fbf96-6bf4-4b62-8a6f-7d0faac3602c tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1780313, 'name': ReconfigVM_Task, 'duration_secs': 0.216668} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1926.766592] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d3fbf96-6bf4-4b62-8a6f-7d0faac3602c tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369501', 'volume_id': 'dec1aeed-ba0d-4ad4-9dfc-ec071c6b051c', 'name': 'volume-dec1aeed-ba0d-4ad4-9dfc-ec071c6b051c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd3c05ba6-b565-4432-b815-14ae0933853e', 'attached_at': '', 'detached_at': '', 'volume_id': 'dec1aeed-ba0d-4ad4-9dfc-ec071c6b051c', 'serial': 'dec1aeed-ba0d-4ad4-9dfc-ec071c6b051c'} {{(pid=63379) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1926.903169] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "2be6bdea-416e-4912-8930-3c4e4f194f99" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1926.903408] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "2be6bdea-416e-4912-8930-3c4e4f194f99" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1926.903615] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "2be6bdea-416e-4912-8930-3c4e4f194f99-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1926.903803] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "2be6bdea-416e-4912-8930-3c4e4f194f99-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1926.904048] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "2be6bdea-416e-4912-8930-3c4e4f194f99-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1926.906271] env[63379]: INFO nova.compute.manager [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Terminating instance [ 1926.908104] 
env[63379]: DEBUG nova.compute.manager [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1926.908252] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1926.911023] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fad97a1c-6378-4da0-b79b-ddb74ce51dc8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.916971] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1926.917227] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2842d3ac-68d2-4fa4-bd3f-3de5c488e17b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.923488] env[63379]: DEBUG oslo_vmware.api [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1926.923488] env[63379]: value = "task-1780314" [ 1926.923488] env[63379]: _type = "Task" [ 1926.923488] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1926.932479] env[63379]: DEBUG oslo_vmware.api [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780314, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.038020] env[63379]: DEBUG nova.scheduler.client.report [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1927.309186] env[63379]: DEBUG nova.objects.instance [None req-4d3fbf96-6bf4-4b62-8a6f-7d0faac3602c tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lazy-loading 'flavor' on Instance uuid d3c05ba6-b565-4432-b815-14ae0933853e {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1927.434169] env[63379]: DEBUG oslo_vmware.api [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780314, 'name': PowerOffVM_Task, 'duration_secs': 0.1712} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1927.434451] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1927.434624] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1927.434882] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ac410269-5139-46b1-9b36-0e21346b1f35 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.518204] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1927.518439] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1927.518630] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] 
Deleting the datastore file [datastore1] 2be6bdea-416e-4912-8930-3c4e4f194f99 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1927.518926] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e0574cb3-997d-45ca-a9d4-307844ab6b87 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.526062] env[63379]: DEBUG oslo_vmware.api [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1927.526062] env[63379]: value = "task-1780316" [ 1927.526062] env[63379]: _type = "Task" [ 1927.526062] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1927.533719] env[63379]: DEBUG oslo_vmware.api [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780316, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.543542] env[63379]: DEBUG oslo_concurrency.lockutils [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.194s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1927.544072] env[63379]: DEBUG nova.compute.manager [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1927.546712] env[63379]: DEBUG oslo_concurrency.lockutils [None req-dd5436ad-c4ff-4922-8055-189918c23dff tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.804s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1927.546943] env[63379]: DEBUG nova.objects.instance [None req-dd5436ad-c4ff-4922-8055-189918c23dff tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Lazy-loading 'resources' on Instance uuid b3f753e3-2ec6-4359-8de0-f9c771e274e5 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1928.036353] env[63379]: DEBUG oslo_vmware.api [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780316, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1928.050068] env[63379]: DEBUG nova.compute.utils [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1928.054168] env[63379]: DEBUG nova.compute.manager [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1928.054350] env[63379]: DEBUG nova.network.neutron [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1928.099882] env[63379]: DEBUG nova.policy [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '99f3906f7b7e47a1a81c5c8f38d5b4ea', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '645f0e0a5e1a44d59ca9c85da49bb454', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1928.180830] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a324c70c-8dcc-4a94-aeeb-c924542b68ae {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.190091] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7ee12b3-6813-42d5-bb23-98401c6b5d16 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.221062] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7dfe0a2-3231-43dc-b761-a65eff16a61f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.228485] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62df3523-878c-4ebc-a1e4-1fa54b5e80f8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.241940] env[63379]: DEBUG nova.compute.provider_tree [None req-dd5436ad-c4ff-4922-8055-189918c23dff tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1928.316411] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4d3fbf96-6bf4-4b62-8a6f-7d0faac3602c tempest-AttachVolumeTestJSON-841781366 
tempest-AttachVolumeTestJSON-841781366-project-member] Lock "d3c05ba6-b565-4432-b815-14ae0933853e" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.223s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1928.373772] env[63379]: DEBUG nova.network.neutron [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Successfully created port: 43a4d9a7-51c7-4dbd-8864-2e6fbcb7c13e {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1928.537278] env[63379]: DEBUG oslo_vmware.api [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780316, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1928.557008] env[63379]: DEBUG nova.compute.manager [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1928.744784] env[63379]: DEBUG nova.scheduler.client.report [None req-dd5436ad-c4ff-4922-8055-189918c23dff tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1929.037671] env[63379]: DEBUG oslo_vmware.api [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780316, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.251206] env[63379]: DEBUG oslo_concurrency.lockutils [None req-dd5436ad-c4ff-4922-8055-189918c23dff tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.704s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1929.270681] env[63379]: INFO nova.scheduler.client.report [None req-dd5436ad-c4ff-4922-8055-189918c23dff tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Deleted allocations for instance b3f753e3-2ec6-4359-8de0-f9c771e274e5 [ 1929.469124] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5f50c458-8d60-4d38-8e68-c9f82f6e3239 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Acquiring lock "d3c05ba6-b565-4432-b815-14ae0933853e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1929.469412] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5f50c458-8d60-4d38-8e68-c9f82f6e3239 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lock "d3c05ba6-b565-4432-b815-14ae0933853e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1929.469635] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5f50c458-8d60-4d38-8e68-c9f82f6e3239 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Acquiring lock "d3c05ba6-b565-4432-b815-14ae0933853e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1929.469827] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5f50c458-8d60-4d38-8e68-c9f82f6e3239 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lock "d3c05ba6-b565-4432-b815-14ae0933853e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1929.470027] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5f50c458-8d60-4d38-8e68-c9f82f6e3239 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lock "d3c05ba6-b565-4432-b815-14ae0933853e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1929.472133] env[63379]: INFO nova.compute.manager [None req-5f50c458-8d60-4d38-8e68-c9f82f6e3239 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Terminating instance [ 1929.473821] env[63379]: DEBUG nova.compute.manager [None req-5f50c458-8d60-4d38-8e68-c9f82f6e3239 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: 
d3c05ba6-b565-4432-b815-14ae0933853e] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1929.474072] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-5f50c458-8d60-4d38-8e68-c9f82f6e3239 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1929.474933] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa1037cc-41af-42dd-b85a-5d00a258d51d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.482730] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f50c458-8d60-4d38-8e68-c9f82f6e3239 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1929.482961] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9451a47a-092d-4202-8c33-eb9d8cf06937 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.489847] env[63379]: DEBUG oslo_vmware.api [None req-5f50c458-8d60-4d38-8e68-c9f82f6e3239 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Waiting for the task: (returnval){ [ 1929.489847] env[63379]: value = "task-1780317" [ 1929.489847] env[63379]: _type = "Task" [ 1929.489847] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1929.498130] env[63379]: DEBUG oslo_vmware.api [None req-5f50c458-8d60-4d38-8e68-c9f82f6e3239 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1780317, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.537960] env[63379]: DEBUG oslo_vmware.api [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780316, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.566458] env[63379]: DEBUG nova.compute.manager [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1929.591789] env[63379]: DEBUG nova.virt.hardware [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1929.592066] env[63379]: DEBUG nova.virt.hardware [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1929.592233] env[63379]: DEBUG nova.virt.hardware [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1929.592426] env[63379]: DEBUG nova.virt.hardware [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1929.592571] env[63379]: DEBUG nova.virt.hardware [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1929.592721] env[63379]: DEBUG nova.virt.hardware [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1929.592934] env[63379]: DEBUG nova.virt.hardware [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1929.593109] env[63379]: DEBUG nova.virt.hardware [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1929.593281] env[63379]: DEBUG 
nova.virt.hardware [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1929.593447] env[63379]: DEBUG nova.virt.hardware [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1929.593623] env[63379]: DEBUG nova.virt.hardware [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1929.594548] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46869077-50f7-414f-90a4-a7e3ccec5781 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.604278] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09bbc827-3951-48d8-93d6-be331dd20cea {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.759561] env[63379]: DEBUG nova.compute.manager [req-53066241-ae95-4f1b-accd-8b17d52418db req-8623afcf-de28-413a-bbde-0f93c2ef8575 service nova] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Received event network-vif-plugged-43a4d9a7-51c7-4dbd-8864-2e6fbcb7c13e {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1929.759800] env[63379]: DEBUG oslo_concurrency.lockutils [req-53066241-ae95-4f1b-accd-8b17d52418db req-8623afcf-de28-413a-bbde-0f93c2ef8575 service nova] Acquiring lock "df8d513d-c201-4ffe-894e-cf8c3318cecc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1929.760058] env[63379]: DEBUG oslo_concurrency.lockutils [req-53066241-ae95-4f1b-accd-8b17d52418db req-8623afcf-de28-413a-bbde-0f93c2ef8575 service nova] Lock "df8d513d-c201-4ffe-894e-cf8c3318cecc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1929.760288] env[63379]: DEBUG oslo_concurrency.lockutils [req-53066241-ae95-4f1b-accd-8b17d52418db req-8623afcf-de28-413a-bbde-0f93c2ef8575 service nova] Lock "df8d513d-c201-4ffe-894e-cf8c3318cecc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1929.760463] env[63379]: DEBUG nova.compute.manager [req-53066241-ae95-4f1b-accd-8b17d52418db req-8623afcf-de28-413a-bbde-0f93c2ef8575 service nova] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] No waiting events found dispatching network-vif-plugged-43a4d9a7-51c7-4dbd-8864-2e6fbcb7c13e {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1929.760633] env[63379]: WARNING nova.compute.manager 
[req-53066241-ae95-4f1b-accd-8b17d52418db req-8623afcf-de28-413a-bbde-0f93c2ef8575 service nova] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Received unexpected event network-vif-plugged-43a4d9a7-51c7-4dbd-8864-2e6fbcb7c13e for instance with vm_state building and task_state spawning. [ 1929.777523] env[63379]: DEBUG oslo_concurrency.lockutils [None req-dd5436ad-c4ff-4922-8055-189918c23dff tempest-ServerShowV247Test-1573948792 tempest-ServerShowV247Test-1573948792-project-member] Lock "b3f753e3-2ec6-4359-8de0-f9c771e274e5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.731s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1929.852526] env[63379]: DEBUG nova.network.neutron [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Successfully updated port: 43a4d9a7-51c7-4dbd-8864-2e6fbcb7c13e {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1930.000752] env[63379]: DEBUG oslo_vmware.api [None req-5f50c458-8d60-4d38-8e68-c9f82f6e3239 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1780317, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.038932] env[63379]: DEBUG oslo_vmware.api [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780316, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.354776] env[63379]: DEBUG oslo_concurrency.lockutils [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "refresh_cache-df8d513d-c201-4ffe-894e-cf8c3318cecc" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1930.354964] env[63379]: DEBUG oslo_concurrency.lockutils [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquired lock "refresh_cache-df8d513d-c201-4ffe-894e-cf8c3318cecc" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1930.355142] env[63379]: DEBUG nova.network.neutron [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1930.501630] env[63379]: DEBUG oslo_vmware.api [None req-5f50c458-8d60-4d38-8e68-c9f82f6e3239 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1780317, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.540298] env[63379]: DEBUG oslo_vmware.api [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780316, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.888545] env[63379]: DEBUG nova.network.neutron [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1931.001408] env[63379]: DEBUG oslo_vmware.api [None req-5f50c458-8d60-4d38-8e68-c9f82f6e3239 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1780317, 'name': PowerOffVM_Task, 'duration_secs': 1.400634} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1931.001737] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f50c458-8d60-4d38-8e68-c9f82f6e3239 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1931.001865] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-5f50c458-8d60-4d38-8e68-c9f82f6e3239 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1931.002143] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e167dcc7-669e-4f11-b7f1-cb05a0faad9a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.040200] env[63379]: DEBUG oslo_vmware.api [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780316, 'name': DeleteDatastoreFile_Task, 'duration_secs': 3.292365} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1931.040571] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1931.040647] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1931.040835] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1931.041021] env[63379]: INFO nova.compute.manager [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Took 4.13 seconds to destroy the instance on the hypervisor. [ 1931.041571] env[63379]: DEBUG oslo.service.loopingcall [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1931.041571] env[63379]: DEBUG nova.compute.manager [-] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1931.041571] env[63379]: DEBUG nova.network.neutron [-] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1931.069244] env[63379]: DEBUG nova.network.neutron [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Updating instance_info_cache with network_info: [{"id": "43a4d9a7-51c7-4dbd-8864-2e6fbcb7c13e", "address": "fa:16:3e:2d:9c:d3", "network": {"id": "0dd98be0-5b25-4e45-ac38-4b8d3cd9fc6c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-191573180-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "645f0e0a5e1a44d59ca9c85da49bb454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43a4d9a7-51", "ovs_interfaceid": "43a4d9a7-51c7-4dbd-8864-2e6fbcb7c13e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1931.219979] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-5f50c458-8d60-4d38-8e68-c9f82f6e3239 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1931.220216] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-5f50c458-8d60-4d38-8e68-c9f82f6e3239 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1931.220422] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f50c458-8d60-4d38-8e68-c9f82f6e3239 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Deleting the datastore file [datastore1] d3c05ba6-b565-4432-b815-14ae0933853e {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1931.220670] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b23e881d-8e3b-4cc4-b7f7-e49ad0454233 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.227581] env[63379]: DEBUG oslo_vmware.api [None 
req-5f50c458-8d60-4d38-8e68-c9f82f6e3239 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Waiting for the task: (returnval){ [ 1931.227581] env[63379]: value = "task-1780319" [ 1931.227581] env[63379]: _type = "Task" [ 1931.227581] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1931.236431] env[63379]: DEBUG oslo_vmware.api [None req-5f50c458-8d60-4d38-8e68-c9f82f6e3239 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1780319, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1931.356788] env[63379]: DEBUG oslo_concurrency.lockutils [None req-79f5d65b-0ba2-48a4-a627-121c566390d6 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "interface-48c17c3b-1197-46cb-a0f7-3671b2d82c7e-44cd89ca-ba87-42ee-bfba-e868680926c7" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1931.357173] env[63379]: DEBUG oslo_concurrency.lockutils [None req-79f5d65b-0ba2-48a4-a627-121c566390d6 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "interface-48c17c3b-1197-46cb-a0f7-3671b2d82c7e-44cd89ca-ba87-42ee-bfba-e868680926c7" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1931.357443] env[63379]: DEBUG nova.objects.instance [None req-79f5d65b-0ba2-48a4-a627-121c566390d6 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lazy-loading 'flavor' on Instance uuid 48c17c3b-1197-46cb-a0f7-3671b2d82c7e {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1931.575994] env[63379]: DEBUG oslo_concurrency.lockutils [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Releasing lock "refresh_cache-df8d513d-c201-4ffe-894e-cf8c3318cecc" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1931.575994] env[63379]: DEBUG nova.compute.manager [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Instance network_info: |[{"id": "43a4d9a7-51c7-4dbd-8864-2e6fbcb7c13e", "address": "fa:16:3e:2d:9c:d3", "network": {"id": "0dd98be0-5b25-4e45-ac38-4b8d3cd9fc6c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-191573180-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "645f0e0a5e1a44d59ca9c85da49bb454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": 
true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43a4d9a7-51", "ovs_interfaceid": "43a4d9a7-51c7-4dbd-8864-2e6fbcb7c13e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1931.575994] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2d:9c:d3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '43a4d9a7-51c7-4dbd-8864-2e6fbcb7c13e', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1931.584704] env[63379]: DEBUG oslo.service.loopingcall [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1931.585597] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1931.585881] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e9d05931-9735-4ba0-8288-70825dbf05d8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.604560] env[63379]: DEBUG nova.compute.manager [req-c733e8ff-9d50-44fd-ba6a-1f520607d1c9 req-8d8b3ec2-9597-4844-9c56-d7a599163b4e service nova] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Received event network-vif-deleted-f7fd4937-49e3-4d89-8fed-cc6c052fc1c0 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1931.604852] env[63379]: INFO nova.compute.manager [req-c733e8ff-9d50-44fd-ba6a-1f520607d1c9 req-8d8b3ec2-9597-4844-9c56-d7a599163b4e service nova] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Neutron deleted interface f7fd4937-49e3-4d89-8fed-cc6c052fc1c0; detaching it from the instance and deleting it from the info cache [ 1931.605090] env[63379]: DEBUG nova.network.neutron [req-c733e8ff-9d50-44fd-ba6a-1f520607d1c9 req-8d8b3ec2-9597-4844-9c56-d7a599163b4e service nova] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1931.611499] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1931.611499] env[63379]: value = "task-1780320" [ 1931.611499] env[63379]: _type = "Task" [ 1931.611499] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1931.620322] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780320, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1931.739529] env[63379]: DEBUG oslo_vmware.api [None req-5f50c458-8d60-4d38-8e68-c9f82f6e3239 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Task: {'id': task-1780319, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.162277} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1931.739828] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f50c458-8d60-4d38-8e68-c9f82f6e3239 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1931.740093] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-5f50c458-8d60-4d38-8e68-c9f82f6e3239 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1931.740226] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-5f50c458-8d60-4d38-8e68-c9f82f6e3239 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1931.740896] env[63379]: INFO nova.compute.manager [None req-5f50c458-8d60-4d38-8e68-c9f82f6e3239 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Took 2.27 seconds to destroy the instance on the hypervisor. [ 1931.741240] env[63379]: DEBUG oslo.service.loopingcall [None req-5f50c458-8d60-4d38-8e68-c9f82f6e3239 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1931.741677] env[63379]: DEBUG nova.compute.manager [-] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1931.741820] env[63379]: DEBUG nova.network.neutron [-] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1931.792963] env[63379]: DEBUG nova.compute.manager [req-65984cf5-95b4-4c6c-a766-dd8e1a97ce14 req-f6a69737-e493-40c4-86d9-ad8bb08e940b service nova] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Received event network-changed-43a4d9a7-51c7-4dbd-8864-2e6fbcb7c13e {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1931.793193] env[63379]: DEBUG nova.compute.manager [req-65984cf5-95b4-4c6c-a766-dd8e1a97ce14 req-f6a69737-e493-40c4-86d9-ad8bb08e940b service nova] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Refreshing instance network info cache due to event network-changed-43a4d9a7-51c7-4dbd-8864-2e6fbcb7c13e. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1931.793409] env[63379]: DEBUG oslo_concurrency.lockutils [req-65984cf5-95b4-4c6c-a766-dd8e1a97ce14 req-f6a69737-e493-40c4-86d9-ad8bb08e940b service nova] Acquiring lock "refresh_cache-df8d513d-c201-4ffe-894e-cf8c3318cecc" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1931.793554] env[63379]: DEBUG oslo_concurrency.lockutils [req-65984cf5-95b4-4c6c-a766-dd8e1a97ce14 req-f6a69737-e493-40c4-86d9-ad8bb08e940b service nova] Acquired lock "refresh_cache-df8d513d-c201-4ffe-894e-cf8c3318cecc" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1931.793717] env[63379]: DEBUG nova.network.neutron [req-65984cf5-95b4-4c6c-a766-dd8e1a97ce14 req-f6a69737-e493-40c4-86d9-ad8bb08e940b service nova] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Refreshing network info cache for port 43a4d9a7-51c7-4dbd-8864-2e6fbcb7c13e {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1932.045870] env[63379]: DEBUG nova.objects.instance [None req-79f5d65b-0ba2-48a4-a627-121c566390d6 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lazy-loading 'pci_requests' on Instance uuid 48c17c3b-1197-46cb-a0f7-3671b2d82c7e {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1932.075458] env[63379]: DEBUG nova.network.neutron [-] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1932.108331] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1f9a6151-c3a4-4b0c-8f37-bb738672494a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.126609] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780320, 'name': CreateVM_Task, 'duration_secs': 0.484826} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1932.126861] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1932.130265] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24aea26b-f1e2-4d14-8265-172bfda87451 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.143524] env[63379]: DEBUG oslo_concurrency.lockutils [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1932.143524] env[63379]: DEBUG oslo_concurrency.lockutils [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1932.143841] env[63379]: DEBUG oslo_concurrency.lockutils [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1932.144652] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b1625f8-f4d9-4d43-bc71-baf111b54b94 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.153193] env[63379]: DEBUG oslo_vmware.api [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 1932.153193] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f77ac7-b7f3-4c0c-b1cf-98431de2e6e3" [ 1932.153193] env[63379]: _type = "Task" [ 1932.153193] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1932.168022] env[63379]: DEBUG nova.compute.manager [req-c733e8ff-9d50-44fd-ba6a-1f520607d1c9 req-8d8b3ec2-9597-4844-9c56-d7a599163b4e service nova] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Detach interface failed, port_id=f7fd4937-49e3-4d89-8fed-cc6c052fc1c0, reason: Instance 2be6bdea-416e-4912-8930-3c4e4f194f99 could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 1932.169737] env[63379]: DEBUG oslo_vmware.api [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f77ac7-b7f3-4c0c-b1cf-98431de2e6e3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.548880] env[63379]: DEBUG nova.objects.base [None req-79f5d65b-0ba2-48a4-a627-121c566390d6 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Object Instance<48c17c3b-1197-46cb-a0f7-3671b2d82c7e> lazy-loaded attributes: flavor,pci_requests {{(pid=63379) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1932.549964] env[63379]: DEBUG nova.network.neutron [None req-79f5d65b-0ba2-48a4-a627-121c566390d6 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1932.580699] env[63379]: INFO nova.compute.manager [-] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Took 1.54 seconds to deallocate network for instance. [ 1932.614234] env[63379]: DEBUG nova.policy [None req-79f5d65b-0ba2-48a4-a627-121c566390d6 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5cbf26808a73470898829b58491e7c6f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'edb0d4b37a67492f9e0275b341e80cc2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1932.667511] env[63379]: DEBUG oslo_vmware.api [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f77ac7-b7f3-4c0c-b1cf-98431de2e6e3, 'name': SearchDatastore_Task, 'duration_secs': 0.010194} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1932.667838] env[63379]: DEBUG oslo_concurrency.lockutils [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1932.668092] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1932.668340] env[63379]: DEBUG oslo_concurrency.lockutils [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1932.668773] env[63379]: DEBUG oslo_concurrency.lockutils [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1932.668773] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1932.669995] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fea5b13c-a945-4706-8c03-fb696b786b0d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.680567] env[63379]: DEBUG nova.network.neutron [req-65984cf5-95b4-4c6c-a766-dd8e1a97ce14 req-f6a69737-e493-40c4-86d9-ad8bb08e940b service nova] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Updated VIF entry in instance network info cache for port 43a4d9a7-51c7-4dbd-8864-2e6fbcb7c13e. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1932.680922] env[63379]: DEBUG nova.network.neutron [req-65984cf5-95b4-4c6c-a766-dd8e1a97ce14 req-f6a69737-e493-40c4-86d9-ad8bb08e940b service nova] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Updating instance_info_cache with network_info: [{"id": "43a4d9a7-51c7-4dbd-8864-2e6fbcb7c13e", "address": "fa:16:3e:2d:9c:d3", "network": {"id": "0dd98be0-5b25-4e45-ac38-4b8d3cd9fc6c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-191573180-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "645f0e0a5e1a44d59ca9c85da49bb454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43a4d9a7-51", "ovs_interfaceid": "43a4d9a7-51c7-4dbd-8864-2e6fbcb7c13e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1932.682742] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1932.682924] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1932.689018] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65783ebc-5237-4b7c-ac6d-9f800d622ba3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.691862] env[63379]: DEBUG oslo_vmware.api [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 1932.691862] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5266a296-123d-9498-ad80-40c65157f55c" [ 1932.691862] env[63379]: _type = "Task" [ 1932.691862] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1932.704371] env[63379]: DEBUG oslo_vmware.api [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5266a296-123d-9498-ad80-40c65157f55c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.884674] env[63379]: DEBUG nova.network.neutron [-] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1933.086672] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1933.087127] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1933.087239] env[63379]: DEBUG nova.objects.instance [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lazy-loading 'resources' on Instance uuid 2be6bdea-416e-4912-8930-3c4e4f194f99 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1933.193031] env[63379]: DEBUG oslo_concurrency.lockutils [req-65984cf5-95b4-4c6c-a766-dd8e1a97ce14 req-f6a69737-e493-40c4-86d9-ad8bb08e940b service nova] Releasing lock "refresh_cache-df8d513d-c201-4ffe-894e-cf8c3318cecc" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1933.204067] env[63379]: DEBUG oslo_vmware.api [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5266a296-123d-9498-ad80-40c65157f55c, 'name': SearchDatastore_Task, 'duration_secs': 0.011371} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1933.204990] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e47a622a-9666-499e-8330-3ca5aae258fd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.213316] env[63379]: DEBUG oslo_vmware.api [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 1933.213316] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]523a70ce-a4ad-fb06-7aaf-ad6484586d6e" [ 1933.213316] env[63379]: _type = "Task" [ 1933.213316] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1933.222650] env[63379]: DEBUG oslo_vmware.api [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]523a70ce-a4ad-fb06-7aaf-ad6484586d6e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.387803] env[63379]: INFO nova.compute.manager [-] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Took 1.65 seconds to deallocate network for instance. [ 1933.719340] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54315549-c284-4664-a025-3d39c2ba4be1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.725715] env[63379]: DEBUG oslo_vmware.api [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]523a70ce-a4ad-fb06-7aaf-ad6484586d6e, 'name': SearchDatastore_Task, 'duration_secs': 0.009301} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1933.726430] env[63379]: DEBUG oslo_concurrency.lockutils [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1933.726760] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] df8d513d-c201-4ffe-894e-cf8c3318cecc/df8d513d-c201-4ffe-894e-cf8c3318cecc.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1933.727082] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2fe2e5ca-6a34-4e44-ad76-54e80a05205b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.733510] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-761c2183-bdb8-4e8c-adb2-294a832fdc07 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.737722] env[63379]: DEBUG oslo_vmware.api [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 1933.737722] env[63379]: value = "task-1780321" [ 1933.737722] env[63379]: _type = "Task" [ 1933.737722] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1933.767903] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c75a9c2-ed90-4584-b7f0-24614a3fdf6c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.773790] env[63379]: DEBUG oslo_vmware.api [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780321, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.779100] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1958952d-ba4d-44de-a421-50797e00508c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.793239] env[63379]: DEBUG nova.compute.provider_tree [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1933.899723] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5f50c458-8d60-4d38-8e68-c9f82f6e3239 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1934.039483] env[63379]: DEBUG nova.compute.manager [req-1448e6dd-b5d8-4283-8962-671dd4877dcf req-4ddd0bc1-65a1-47f7-8d3c-f0984d8b7ca4 service nova] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Received event network-vif-deleted-1b29b7f2-a269-473e-a89e-a072a3155131 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1934.193195] env[63379]: DEBUG nova.network.neutron [None req-79f5d65b-0ba2-48a4-a627-121c566390d6 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Successfully updated port: 44cd89ca-ba87-42ee-bfba-e868680926c7 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1934.249845] env[63379]: DEBUG oslo_vmware.api [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780321, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1934.318083] env[63379]: ERROR nova.scheduler.client.report [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [req-8e314915-382b-42af-90cb-76cdbd0fe004] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID cf478c89-515f-4372-b90f-4868ab56e978. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-8e314915-382b-42af-90cb-76cdbd0fe004"}]} [ 1934.334361] env[63379]: DEBUG nova.scheduler.client.report [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Refreshing inventories for resource provider cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1934.352571] env[63379]: DEBUG nova.scheduler.client.report [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Updating ProviderTree inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1934.352571] env[63379]: DEBUG nova.compute.provider_tree [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1934.364059] env[63379]: DEBUG nova.scheduler.client.report [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Refreshing aggregate associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, aggregates: None {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1934.385273] env[63379]: DEBUG nova.scheduler.client.report [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Refreshing trait associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1934.517016] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26f6e631-de90-48bc-8109-81651ef2ab7a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.522178] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bc07ede-9dd4-4ee5-b545-f68d4763ab52 
{{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.556171] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cfd9635-ec0e-4919-9b2f-e0be53037dae {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.561666] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Acquiring lock "10fc842d-b821-4103-b6a5-f5b2fc46ea74" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1934.561906] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Lock "10fc842d-b821-4103-b6a5-f5b2fc46ea74" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1934.568506] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8db2da4c-43db-48f1-a20c-83ea1a082bfd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.584807] env[63379]: DEBUG nova.compute.provider_tree [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1934.698825] env[63379]: DEBUG oslo_concurrency.lockutils [None req-79f5d65b-0ba2-48a4-a627-121c566390d6 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "refresh_cache-48c17c3b-1197-46cb-a0f7-3671b2d82c7e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1934.699072] env[63379]: DEBUG oslo_concurrency.lockutils [None req-79f5d65b-0ba2-48a4-a627-121c566390d6 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquired lock "refresh_cache-48c17c3b-1197-46cb-a0f7-3671b2d82c7e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1934.699264] env[63379]: DEBUG nova.network.neutron [None req-79f5d65b-0ba2-48a4-a627-121c566390d6 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1934.748857] env[63379]: DEBUG oslo_vmware.api [None 
req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780321, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.996814} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1934.749148] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] df8d513d-c201-4ffe-894e-cf8c3318cecc/df8d513d-c201-4ffe-894e-cf8c3318cecc.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1934.749366] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1934.749612] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e7fcc2d5-0c5e-4a97-9ba4-52a4e6287289 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.755278] env[63379]: DEBUG oslo_vmware.api [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 1934.755278] env[63379]: value = "task-1780322" [ 1934.755278] env[63379]: _type = "Task" [ 1934.755278] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1934.762304] env[63379]: DEBUG oslo_vmware.api [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780322, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1935.064904] env[63379]: DEBUG nova.compute.manager [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Starting instance... 
{{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1935.128146] env[63379]: DEBUG nova.scheduler.client.report [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Updated inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 with generation 150 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1935.128319] env[63379]: DEBUG nova.compute.provider_tree [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Updating resource provider cf478c89-515f-4372-b90f-4868ab56e978 generation from 150 to 151 during operation: update_inventory {{(pid=63379) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1935.128524] env[63379]: DEBUG nova.compute.provider_tree [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1935.238397] env[63379]: WARNING nova.network.neutron [None req-79f5d65b-0ba2-48a4-a627-121c566390d6 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] 501025fb-aee7-4f74-80fd-af4976529317 already exists in list: networks containing: ['501025fb-aee7-4f74-80fd-af4976529317']. ignoring it [ 1935.266237] env[63379]: DEBUG oslo_vmware.api [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780322, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070879} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1935.266526] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1935.267308] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33e6c62b-0220-4952-9b78-9fe04e47feb4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.291186] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] df8d513d-c201-4ffe-894e-cf8c3318cecc/df8d513d-c201-4ffe-894e-cf8c3318cecc.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1935.291475] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-220a4a71-4e46-48c5-8743-2fbac636ee4e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.313211] env[63379]: DEBUG oslo_vmware.api [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 1935.313211] env[63379]: value = "task-1780323" [ 1935.313211] env[63379]: _type = "Task" [ 1935.313211] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1935.321227] env[63379]: DEBUG oslo_vmware.api [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780323, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1935.531932] env[63379]: DEBUG nova.network.neutron [None req-79f5d65b-0ba2-48a4-a627-121c566390d6 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Updating instance_info_cache with network_info: [{"id": "4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191", "address": "fa:16:3e:c6:a7:ff", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.211", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b8a4c15-27", "ovs_interfaceid": "4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "44cd89ca-ba87-42ee-bfba-e868680926c7", "address": "fa:16:3e:51:f5:6a", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44cd89ca-ba", "ovs_interfaceid": "44cd89ca-ba87-42ee-bfba-e868680926c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1935.586717] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1935.633733] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 
tempest-ServerActionsTestOtherA-156498572-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.546s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1935.635661] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5f50c458-8d60-4d38-8e68-c9f82f6e3239 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.737s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1935.635957] env[63379]: DEBUG nova.objects.instance [None req-5f50c458-8d60-4d38-8e68-c9f82f6e3239 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lazy-loading 'resources' on Instance uuid d3c05ba6-b565-4432-b815-14ae0933853e {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1935.655264] env[63379]: INFO nova.scheduler.client.report [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Deleted allocations for instance 2be6bdea-416e-4912-8930-3c4e4f194f99 [ 1935.823309] env[63379]: DEBUG oslo_vmware.api [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780323, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1936.034600] env[63379]: DEBUG oslo_concurrency.lockutils [None req-79f5d65b-0ba2-48a4-a627-121c566390d6 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Releasing lock "refresh_cache-48c17c3b-1197-46cb-a0f7-3671b2d82c7e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1936.035328] env[63379]: DEBUG oslo_concurrency.lockutils [None req-79f5d65b-0ba2-48a4-a627-121c566390d6 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "48c17c3b-1197-46cb-a0f7-3671b2d82c7e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1936.035494] env[63379]: DEBUG oslo_concurrency.lockutils [None req-79f5d65b-0ba2-48a4-a627-121c566390d6 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquired lock "48c17c3b-1197-46cb-a0f7-3671b2d82c7e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1936.036417] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cd1481c-e583-4ffe-b9ff-f2b4634ab4af {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.053356] env[63379]: DEBUG nova.virt.hardware [None req-79f5d65b-0ba2-48a4-a627-121c566390d6 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1936.053605] env[63379]: DEBUG nova.virt.hardware [None req-79f5d65b-0ba2-48a4-a627-121c566390d6 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1936.053782] env[63379]: DEBUG nova.virt.hardware [None req-79f5d65b-0ba2-48a4-a627-121c566390d6 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1936.054052] env[63379]: DEBUG nova.virt.hardware [None req-79f5d65b-0ba2-48a4-a627-121c566390d6 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1936.054162] env[63379]: DEBUG nova.virt.hardware [None req-79f5d65b-0ba2-48a4-a627-121c566390d6 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1936.054318] env[63379]: DEBUG nova.virt.hardware [None req-79f5d65b-0ba2-48a4-a627-121c566390d6 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1936.054632] env[63379]: DEBUG nova.virt.hardware [None req-79f5d65b-0ba2-48a4-a627-121c566390d6 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1936.056021] env[63379]: DEBUG nova.virt.hardware [None req-79f5d65b-0ba2-48a4-a627-121c566390d6 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1936.056021] env[63379]: DEBUG nova.virt.hardware [None req-79f5d65b-0ba2-48a4-a627-121c566390d6 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1936.056021] env[63379]: DEBUG nova.virt.hardware [None req-79f5d65b-0ba2-48a4-a627-121c566390d6 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Possible topologies 
[VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1936.056021] env[63379]: DEBUG nova.virt.hardware [None req-79f5d65b-0ba2-48a4-a627-121c566390d6 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1936.062011] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-79f5d65b-0ba2-48a4-a627-121c566390d6 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Reconfiguring VM to attach interface {{(pid=63379) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1936.062324] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-52e2c062-5c8d-4a90-8f39-3c777fcfc11f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.076721] env[63379]: DEBUG nova.compute.manager [req-300eb3de-701b-417d-87f2-6e52217e84ef req-2ee9d38a-b9fb-48a5-ae24-69cc7bd89650 service nova] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Received event network-vif-plugged-44cd89ca-ba87-42ee-bfba-e868680926c7 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1936.076935] env[63379]: DEBUG oslo_concurrency.lockutils [req-300eb3de-701b-417d-87f2-6e52217e84ef req-2ee9d38a-b9fb-48a5-ae24-69cc7bd89650 service nova] Acquiring lock "48c17c3b-1197-46cb-a0f7-3671b2d82c7e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1936.077181] env[63379]: DEBUG oslo_concurrency.lockutils [req-300eb3de-701b-417d-87f2-6e52217e84ef req-2ee9d38a-b9fb-48a5-ae24-69cc7bd89650 service nova] Lock "48c17c3b-1197-46cb-a0f7-3671b2d82c7e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1936.077373] env[63379]: DEBUG oslo_concurrency.lockutils [req-300eb3de-701b-417d-87f2-6e52217e84ef req-2ee9d38a-b9fb-48a5-ae24-69cc7bd89650 service nova] Lock "48c17c3b-1197-46cb-a0f7-3671b2d82c7e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1936.077559] env[63379]: DEBUG nova.compute.manager [req-300eb3de-701b-417d-87f2-6e52217e84ef req-2ee9d38a-b9fb-48a5-ae24-69cc7bd89650 service nova] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] No waiting events found dispatching network-vif-plugged-44cd89ca-ba87-42ee-bfba-e868680926c7 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1936.077784] env[63379]: WARNING nova.compute.manager [req-300eb3de-701b-417d-87f2-6e52217e84ef req-2ee9d38a-b9fb-48a5-ae24-69cc7bd89650 service nova] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Received unexpected event network-vif-plugged-44cd89ca-ba87-42ee-bfba-e868680926c7 for instance with vm_state active and task_state None. 
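Editor's note: a few entries back, the inventory PUT for provider cf478c89-515f-4372-b90f-4868ab56e978 failed with a 409 "placement.concurrent_update", after which the report client refreshed the provider's inventories, aggregates and traits and retried; the retry then advanced the provider generation from 150 to 151. The sketch below shows that optimistic-concurrency retry shape against the Placement API in the most minimal form. It is not Nova's report client: PLACEMENT_URL, the token, the microversion header and the retry bound are illustrative placeholders, and auth/session handling is omitted.

# Hedged sketch: retry an inventory PUT when the provider generation is stale.
import requests

PLACEMENT_URL = "http://placement.example/placement"      # assumption
HEADERS = {"X-Auth-Token": "TOKEN",                        # assumption
           "OpenStack-API-Version": "placement 1.39"}      # assumption


def set_inventory(rp_uuid, inventories, max_retries=3):
    url = f"{PLACEMENT_URL}/resource_providers/{rp_uuid}/inventories"
    for _ in range(max_retries):
        # Learn the provider's current generation before writing.
        current = requests.get(url, headers=HEADERS).json()
        payload = {
            "resource_provider_generation": current["resource_provider_generation"],
            "inventories": inventories,
        }
        resp = requests.put(url, json=payload, headers=HEADERS)
        if resp.status_code == 200:
            return resp.json()
        if resp.status_code == 409:
            # Someone else updated the provider (placement.concurrent_update):
            # loop, re-read the generation, and try again, as in the log above.
            continue
        resp.raise_for_status()
    raise RuntimeError(f"inventory update for {rp_uuid} kept hitting generation conflicts")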
[ 1936.077978] env[63379]: DEBUG nova.compute.manager [req-300eb3de-701b-417d-87f2-6e52217e84ef req-2ee9d38a-b9fb-48a5-ae24-69cc7bd89650 service nova] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Received event network-changed-44cd89ca-ba87-42ee-bfba-e868680926c7 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1936.078164] env[63379]: DEBUG nova.compute.manager [req-300eb3de-701b-417d-87f2-6e52217e84ef req-2ee9d38a-b9fb-48a5-ae24-69cc7bd89650 service nova] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Refreshing instance network info cache due to event network-changed-44cd89ca-ba87-42ee-bfba-e868680926c7. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1936.078378] env[63379]: DEBUG oslo_concurrency.lockutils [req-300eb3de-701b-417d-87f2-6e52217e84ef req-2ee9d38a-b9fb-48a5-ae24-69cc7bd89650 service nova] Acquiring lock "refresh_cache-48c17c3b-1197-46cb-a0f7-3671b2d82c7e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1936.078536] env[63379]: DEBUG oslo_concurrency.lockutils [req-300eb3de-701b-417d-87f2-6e52217e84ef req-2ee9d38a-b9fb-48a5-ae24-69cc7bd89650 service nova] Acquired lock "refresh_cache-48c17c3b-1197-46cb-a0f7-3671b2d82c7e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1936.078697] env[63379]: DEBUG nova.network.neutron [req-300eb3de-701b-417d-87f2-6e52217e84ef req-2ee9d38a-b9fb-48a5-ae24-69cc7bd89650 service nova] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Refreshing network info cache for port 44cd89ca-ba87-42ee-bfba-e868680926c7 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1936.086378] env[63379]: DEBUG oslo_vmware.api [None req-79f5d65b-0ba2-48a4-a627-121c566390d6 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1936.086378] env[63379]: value = "task-1780324" [ 1936.086378] env[63379]: _type = "Task" [ 1936.086378] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1936.095389] env[63379]: DEBUG oslo_vmware.api [None req-79f5d65b-0ba2-48a4-a627-121c566390d6 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780324, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1936.163231] env[63379]: DEBUG oslo_concurrency.lockutils [None req-3f3e5f94-4416-4ed9-b88f-6d04d9b9a0c4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "2be6bdea-416e-4912-8930-3c4e4f194f99" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.260s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1936.241097] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25540b07-e233-4898-860e-824fa8b99030 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.251113] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31d81900-f8d6-401b-8151-f5bc1c9018b8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.281705] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c630995-bdc6-4eba-b31e-6e0bff78c1c0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.289240] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4b37ad8-fdf4-40b5-88e8-1132b7a67739 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.303060] env[63379]: DEBUG nova.compute.provider_tree [None req-5f50c458-8d60-4d38-8e68-c9f82f6e3239 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1936.323041] env[63379]: DEBUG oslo_vmware.api [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780323, 'name': ReconfigVM_Task, 'duration_secs': 0.65619} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1936.323351] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Reconfigured VM instance instance-0000006c to attach disk [datastore1] df8d513d-c201-4ffe-894e-cf8c3318cecc/df8d513d-c201-4ffe-894e-cf8c3318cecc.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1936.324014] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4e236735-9b14-447e-8b25-0176ea9ff5a9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.330884] env[63379]: DEBUG oslo_vmware.api [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 1936.330884] env[63379]: value = "task-1780325" [ 1936.330884] env[63379]: _type = "Task" [ 1936.330884] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1936.338582] env[63379]: DEBUG oslo_vmware.api [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780325, 'name': Rename_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1936.598742] env[63379]: DEBUG oslo_vmware.api [None req-79f5d65b-0ba2-48a4-a627-121c566390d6 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780324, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1936.805193] env[63379]: DEBUG nova.network.neutron [req-300eb3de-701b-417d-87f2-6e52217e84ef req-2ee9d38a-b9fb-48a5-ae24-69cc7bd89650 service nova] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Updated VIF entry in instance network info cache for port 44cd89ca-ba87-42ee-bfba-e868680926c7. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1936.805796] env[63379]: DEBUG nova.network.neutron [req-300eb3de-701b-417d-87f2-6e52217e84ef req-2ee9d38a-b9fb-48a5-ae24-69cc7bd89650 service nova] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Updating instance_info_cache with network_info: [{"id": "4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191", "address": "fa:16:3e:c6:a7:ff", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.211", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b8a4c15-27", "ovs_interfaceid": "4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "44cd89ca-ba87-42ee-bfba-e868680926c7", "address": "fa:16:3e:51:f5:6a", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44cd89ca-ba", "ovs_interfaceid": "44cd89ca-ba87-42ee-bfba-e868680926c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1936.808130] env[63379]: DEBUG nova.scheduler.client.report [None req-5f50c458-8d60-4d38-8e68-c9f82f6e3239 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1936.841240] env[63379]: DEBUG oslo_vmware.api [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780325, 'name': Rename_Task, 'duration_secs': 0.134629} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1936.841525] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1936.841773] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4287d59b-9b34-441e-9372-b0396261ab6e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.847509] env[63379]: DEBUG oslo_vmware.api [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 1936.847509] env[63379]: value = "task-1780326" [ 1936.847509] env[63379]: _type = "Task" [ 1936.847509] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1936.856190] env[63379]: DEBUG oslo_vmware.api [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780326, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1937.100722] env[63379]: DEBUG oslo_vmware.api [None req-79f5d65b-0ba2-48a4-a627-121c566390d6 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780324, 'name': ReconfigVM_Task, 'duration_secs': 0.772358} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1937.101568] env[63379]: DEBUG oslo_concurrency.lockutils [None req-79f5d65b-0ba2-48a4-a627-121c566390d6 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Releasing lock "48c17c3b-1197-46cb-a0f7-3671b2d82c7e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1937.101926] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-79f5d65b-0ba2-48a4-a627-121c566390d6 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Reconfigured VM to attach interface {{(pid=63379) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1937.313126] env[63379]: DEBUG oslo_concurrency.lockutils [req-300eb3de-701b-417d-87f2-6e52217e84ef req-2ee9d38a-b9fb-48a5-ae24-69cc7bd89650 service nova] Releasing lock "refresh_cache-48c17c3b-1197-46cb-a0f7-3671b2d82c7e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1937.313992] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5f50c458-8d60-4d38-8e68-c9f82f6e3239 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.678s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1937.316430] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.730s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1937.318061] env[63379]: INFO nova.compute.claims [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1937.339823] env[63379]: INFO nova.scheduler.client.report [None req-5f50c458-8d60-4d38-8e68-c9f82f6e3239 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Deleted allocations for instance d3c05ba6-b565-4432-b815-14ae0933853e [ 1937.357150] env[63379]: DEBUG oslo_vmware.api [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780326, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1937.610123] env[63379]: DEBUG oslo_concurrency.lockutils [None req-79f5d65b-0ba2-48a4-a627-121c566390d6 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "interface-48c17c3b-1197-46cb-a0f7-3671b2d82c7e-44cd89ca-ba87-42ee-bfba-e868680926c7" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.253s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1937.848544] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5f50c458-8d60-4d38-8e68-c9f82f6e3239 tempest-AttachVolumeTestJSON-841781366 tempest-AttachVolumeTestJSON-841781366-project-member] Lock "d3c05ba6-b565-4432-b815-14ae0933853e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.379s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1937.859291] env[63379]: DEBUG oslo_vmware.api [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780326, 'name': PowerOnVM_Task, 'duration_secs': 0.518234} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1937.860112] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1937.860341] env[63379]: INFO nova.compute.manager [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Took 8.29 seconds to spawn the instance on the hypervisor. 
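Editor's note: the spawn path above (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) repeatedly logs "Waiting for the task ... progress is N%" because every vSphere call returns a task that must be polled to completion; oslo.vmware wraps this in VMwareAPISession.wait_for_task. The self-contained sketch below only mirrors that polling loop; TaskInfo and the toy progress sequence are illustrative stand-ins, not the oslo.vmware API.

# Hedged sketch of the "progress is N%" polling loop seen above.
import time
from dataclasses import dataclass


@dataclass
class TaskInfo:                      # minimal stand-in for a vSphere TaskInfo
    key: str
    name: str
    state: str                       # 'running' | 'success' | 'error'
    progress: int = 0
    result: object = None
    error: str = ""


def wait_for_task(get_info, poll_interval=0.5, log=print):
    """Poll a task until it succeeds or fails, logging progress each cycle."""
    while True:
        info = get_info()
        if info.state == "success":
            log(f"Task: {info.key} ({info.name}) completed successfully.")
            return info.result
        if info.state == "error":
            raise RuntimeError(f"Task {info.key} failed: {info.error}")
        log(f"Task: {info.key} ({info.name}) progress is {info.progress}%.")
        time.sleep(poll_interval)


# Toy usage: a task that reports 0% / 25% / 100%, like task-1780321 above.
_states = iter([TaskInfo("task-1", "CopyVirtualDisk_Task", "running", 0),
                TaskInfo("task-1", "CopyVirtualDisk_Task", "running", 25),
                TaskInfo("task-1", "CopyVirtualDisk_Task", "success", 100, result="ok")])
print(wait_for_task(lambda: next(_states), poll_interval=0))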
[ 1937.860528] env[63379]: DEBUG nova.compute.manager [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1937.861319] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-594dffdb-be86-4677-8028-afa6cec4e591 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.011620] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "e1681d89-2f55-47b7-9962-55aa169b3d0a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1938.011882] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "e1681d89-2f55-47b7-9962-55aa169b3d0a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1938.382446] env[63379]: INFO nova.compute.manager [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Took 13.05 seconds to build instance. 
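Note: the paired 'Acquiring lock ... by "..."', 'acquired ... :: waited Ns' and '"released" ... :: held Ns' lines throughout this log are emitted by oslo.concurrency's lockutils wrappers (the "inner" frames at lockutils.py:402/407/421), not by the calling Nova code. A minimal sketch of the two forms follows; the lock names and function bodies are illustrative assumptions only.

    # Minimal sketch (illustrative): the two lockutils forms whose DEBUG output
    # appears throughout this log.
    from oslo_concurrency import lockutils

    @lockutils.synchronized("compute_resources")
    def claim(instance_uuid):
        # Runs with the named lock held; the decorator's wrapper ("inner" in the
        # log) records the acquired/waited and released/held timings.
        return {"instance": instance_uuid}

    def refresh_cache(instance_uuid):
        # The context-manager form produces the plain Acquiring/Acquired/Releasing
        # lines (lockutils.py:310/313/331 above).
        with lockutils.lock("refresh_cache-%s" % instance_uuid):
            pass  # rebuild the instance's network-info cache here

The "by" name in the acquired/released lines is the qualified name of the decorated function, which is why entries such as the build_and_run_instance lock above carry the full "_locked_do_build_and_run_instance" path.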
[ 1938.434051] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ad44d80-d751-4536-b21f-9a2fd6b2e0c7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.442581] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c70eaf98-aa52-4d7f-80d7-5c79ba95e909 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.474252] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b77a091-5944-4beb-b0bd-aa2d1609a4ed {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.482298] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3a183a1-9cfd-4f3d-b917-bc3701e42c8e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.496067] env[63379]: DEBUG nova.compute.provider_tree [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1938.513714] env[63379]: DEBUG nova.compute.manager [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1938.884478] env[63379]: DEBUG oslo_concurrency.lockutils [None req-701bcde6-d56f-46ad-9191-288364bc01e1 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "df8d513d-c201-4ffe-894e-cf8c3318cecc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.561s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1938.999136] env[63379]: DEBUG nova.scheduler.client.report [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1939.034623] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1939.097176] env[63379]: DEBUG nova.compute.manager 
[req-70c446a9-937a-49c4-9692-1e3407e3f8d8 req-d949ad0e-04d7-418e-a696-fab996c9216c service nova] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Received event network-changed-43a4d9a7-51c7-4dbd-8864-2e6fbcb7c13e {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1939.097386] env[63379]: DEBUG nova.compute.manager [req-70c446a9-937a-49c4-9692-1e3407e3f8d8 req-d949ad0e-04d7-418e-a696-fab996c9216c service nova] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Refreshing instance network info cache due to event network-changed-43a4d9a7-51c7-4dbd-8864-2e6fbcb7c13e. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1939.097796] env[63379]: DEBUG oslo_concurrency.lockutils [req-70c446a9-937a-49c4-9692-1e3407e3f8d8 req-d949ad0e-04d7-418e-a696-fab996c9216c service nova] Acquiring lock "refresh_cache-df8d513d-c201-4ffe-894e-cf8c3318cecc" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1939.097982] env[63379]: DEBUG oslo_concurrency.lockutils [req-70c446a9-937a-49c4-9692-1e3407e3f8d8 req-d949ad0e-04d7-418e-a696-fab996c9216c service nova] Acquired lock "refresh_cache-df8d513d-c201-4ffe-894e-cf8c3318cecc" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1939.098201] env[63379]: DEBUG nova.network.neutron [req-70c446a9-937a-49c4-9692-1e3407e3f8d8 req-d949ad0e-04d7-418e-a696-fab996c9216c service nova] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Refreshing network info cache for port 43a4d9a7-51c7-4dbd-8864-2e6fbcb7c13e {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1939.117848] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b5ab3775-1500-42a5-a4e6-0925d232a674 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "interface-48c17c3b-1197-46cb-a0f7-3671b2d82c7e-44cd89ca-ba87-42ee-bfba-e868680926c7" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1939.118123] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b5ab3775-1500-42a5-a4e6-0925d232a674 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "interface-48c17c3b-1197-46cb-a0f7-3671b2d82c7e-44cd89ca-ba87-42ee-bfba-e868680926c7" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1939.504694] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.188s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1939.505240] env[63379]: DEBUG nova.compute.manager [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1939.507969] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.474s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1939.509343] env[63379]: INFO nova.compute.claims [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1939.622183] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b5ab3775-1500-42a5-a4e6-0925d232a674 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "48c17c3b-1197-46cb-a0f7-3671b2d82c7e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1939.622183] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b5ab3775-1500-42a5-a4e6-0925d232a674 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquired lock "48c17c3b-1197-46cb-a0f7-3671b2d82c7e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1939.623049] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06ef1291-e04b-4b10-9945-bfdc109aef0c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.643743] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6538d95c-3d57-4286-adcf-fe92c2a80d22 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.673576] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b5ab3775-1500-42a5-a4e6-0925d232a674 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Reconfiguring VM to detach interface {{(pid=63379) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1939.673932] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bab2c552-281a-4575-ada7-8f83fa6815f5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.695959] env[63379]: DEBUG oslo_vmware.api [None req-b5ab3775-1500-42a5-a4e6-0925d232a674 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1939.695959] env[63379]: value = "task-1780328" [ 1939.695959] env[63379]: _type = "Task" [ 1939.695959] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1939.703941] env[63379]: DEBUG oslo_vmware.api [None req-b5ab3775-1500-42a5-a4e6-0925d232a674 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780328, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1939.840014] env[63379]: DEBUG nova.network.neutron [req-70c446a9-937a-49c4-9692-1e3407e3f8d8 req-d949ad0e-04d7-418e-a696-fab996c9216c service nova] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Updated VIF entry in instance network info cache for port 43a4d9a7-51c7-4dbd-8864-2e6fbcb7c13e. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1939.840481] env[63379]: DEBUG nova.network.neutron [req-70c446a9-937a-49c4-9692-1e3407e3f8d8 req-d949ad0e-04d7-418e-a696-fab996c9216c service nova] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Updating instance_info_cache with network_info: [{"id": "43a4d9a7-51c7-4dbd-8864-2e6fbcb7c13e", "address": "fa:16:3e:2d:9c:d3", "network": {"id": "0dd98be0-5b25-4e45-ac38-4b8d3cd9fc6c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-191573180-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "645f0e0a5e1a44d59ca9c85da49bb454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43a4d9a7-51", "ovs_interfaceid": "43a4d9a7-51c7-4dbd-8864-2e6fbcb7c13e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1940.013864] env[63379]: DEBUG nova.compute.utils [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1940.017547] env[63379]: DEBUG nova.compute.manager [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1940.017725] env[63379]: DEBUG nova.network.neutron [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1940.057383] env[63379]: DEBUG nova.policy [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '22787c1f10df433b9f2db1de154f6778', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '28f7e38c300546a2a7a033cb12c7f89a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1940.205709] env[63379]: DEBUG oslo_vmware.api [None req-b5ab3775-1500-42a5-a4e6-0925d232a674 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780328, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.318189] env[63379]: DEBUG nova.network.neutron [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Successfully created port: 7dc69df5-b3d2-494c-b700-584c31779f9a {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1940.342913] env[63379]: DEBUG oslo_concurrency.lockutils [req-70c446a9-937a-49c4-9692-1e3407e3f8d8 req-d949ad0e-04d7-418e-a696-fab996c9216c service nova] Releasing lock "refresh_cache-df8d513d-c201-4ffe-894e-cf8c3318cecc" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1940.524387] env[63379]: DEBUG nova.compute.manager [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Start building block device mappings for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1940.636550] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80e0ad02-8845-420f-a93e-13193e976745 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.644631] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41a5a7cc-e456-40fc-8ece-5699edfc4cc5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.368532] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b01779d4-a312-4060-87fe-38d564279fc6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.378021] env[63379]: DEBUG oslo_vmware.api [None req-b5ab3775-1500-42a5-a4e6-0925d232a674 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780328, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.379504] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-498aa1c5-2edf-4049-9c84-8e519d5edcbc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.391861] env[63379]: DEBUG nova.compute.provider_tree [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1941.698404] env[63379]: DEBUG nova.compute.manager [req-6a91dde9-ebe6-4046-8a90-4aef4805fbde req-6e3ef8e0-3cfa-4aba-b240-0c85855f05ac service nova] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Received event network-vif-plugged-7dc69df5-b3d2-494c-b700-584c31779f9a {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1941.698676] env[63379]: DEBUG oslo_concurrency.lockutils [req-6a91dde9-ebe6-4046-8a90-4aef4805fbde req-6e3ef8e0-3cfa-4aba-b240-0c85855f05ac service nova] Acquiring lock "10fc842d-b821-4103-b6a5-f5b2fc46ea74-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1941.698860] env[63379]: DEBUG oslo_concurrency.lockutils [req-6a91dde9-ebe6-4046-8a90-4aef4805fbde req-6e3ef8e0-3cfa-4aba-b240-0c85855f05ac service nova] Lock "10fc842d-b821-4103-b6a5-f5b2fc46ea74-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1941.698997] env[63379]: DEBUG oslo_concurrency.lockutils [req-6a91dde9-ebe6-4046-8a90-4aef4805fbde req-6e3ef8e0-3cfa-4aba-b240-0c85855f05ac service nova] Lock "10fc842d-b821-4103-b6a5-f5b2fc46ea74-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1941.699216] env[63379]: DEBUG nova.compute.manager 
[req-6a91dde9-ebe6-4046-8a90-4aef4805fbde req-6e3ef8e0-3cfa-4aba-b240-0c85855f05ac service nova] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] No waiting events found dispatching network-vif-plugged-7dc69df5-b3d2-494c-b700-584c31779f9a {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1941.699386] env[63379]: WARNING nova.compute.manager [req-6a91dde9-ebe6-4046-8a90-4aef4805fbde req-6e3ef8e0-3cfa-4aba-b240-0c85855f05ac service nova] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Received unexpected event network-vif-plugged-7dc69df5-b3d2-494c-b700-584c31779f9a for instance with vm_state building and task_state spawning. [ 1941.801438] env[63379]: DEBUG nova.network.neutron [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Successfully updated port: 7dc69df5-b3d2-494c-b700-584c31779f9a {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1941.866126] env[63379]: DEBUG nova.compute.manager [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1941.874011] env[63379]: DEBUG oslo_vmware.api [None req-b5ab3775-1500-42a5-a4e6-0925d232a674 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780328, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.894265] env[63379]: DEBUG nova.scheduler.client.report [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1941.899266] env[63379]: DEBUG nova.virt.hardware [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: 
False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1941.899502] env[63379]: DEBUG nova.virt.hardware [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1941.899661] env[63379]: DEBUG nova.virt.hardware [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1941.899854] env[63379]: DEBUG nova.virt.hardware [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1941.899998] env[63379]: DEBUG nova.virt.hardware [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1941.900193] env[63379]: DEBUG nova.virt.hardware [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1941.900400] env[63379]: DEBUG nova.virt.hardware [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1941.900563] env[63379]: DEBUG nova.virt.hardware [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1941.900731] env[63379]: DEBUG nova.virt.hardware [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1941.900930] env[63379]: DEBUG nova.virt.hardware [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1941.901132] env[63379]: DEBUG nova.virt.hardware [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1941.902019] env[63379]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd5561a9-f5ed-4443-8d74-63e6d13fda42 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.909983] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6339254-1b82-442b-b708-52971dc18202 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.304222] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Acquiring lock "refresh_cache-10fc842d-b821-4103-b6a5-f5b2fc46ea74" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1942.304404] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Acquired lock "refresh_cache-10fc842d-b821-4103-b6a5-f5b2fc46ea74" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1942.304537] env[63379]: DEBUG nova.network.neutron [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1942.371534] env[63379]: DEBUG oslo_vmware.api [None req-b5ab3775-1500-42a5-a4e6-0925d232a674 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780328, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1942.406196] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.898s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1942.406689] env[63379]: DEBUG nova.compute.manager [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1942.840184] env[63379]: DEBUG nova.network.neutron [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1942.872225] env[63379]: DEBUG oslo_vmware.api [None req-b5ab3775-1500-42a5-a4e6-0925d232a674 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780328, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1942.911857] env[63379]: DEBUG nova.compute.utils [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1942.913262] env[63379]: DEBUG nova.compute.manager [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1942.913429] env[63379]: DEBUG nova.network.neutron [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1942.953285] env[63379]: DEBUG nova.policy [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '756ff556130a4855b461899fece1e1fa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a3363a90de2d4d5988ddd03974c10d0a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1942.969387] env[63379]: DEBUG nova.network.neutron [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Updating instance_info_cache with network_info: [{"id": "7dc69df5-b3d2-494c-b700-584c31779f9a", "address": "fa:16:3e:b3:a2:43", "network": {"id": "678e0600-c1d0-4fb0-8219-a81a1ca0f4f0", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1244921057-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28f7e38c300546a2a7a033cb12c7f89a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1895250-76cc-41f7-b7f8-2e5679494607", "external-id": "nsx-vlan-transportzone-785", "segmentation_id": 785, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dc69df5-b3", "ovs_interfaceid": "7dc69df5-b3d2-494c-b700-584c31779f9a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1943.234834] env[63379]: DEBUG nova.network.neutron [None 
req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Successfully created port: a482c861-81a4-437a-a78a-27d652a2e57d {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1943.375461] env[63379]: DEBUG oslo_vmware.api [None req-b5ab3775-1500-42a5-a4e6-0925d232a674 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780328, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.407630] env[63379]: DEBUG oslo_concurrency.lockutils [None req-15e1b101-dae9-499e-a541-5d4859527db8 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquiring lock "1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1943.408075] env[63379]: DEBUG oslo_concurrency.lockutils [None req-15e1b101-dae9-499e-a541-5d4859527db8 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1943.415475] env[63379]: DEBUG nova.compute.manager [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Start building block device mappings for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1943.472761] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Releasing lock "refresh_cache-10fc842d-b821-4103-b6a5-f5b2fc46ea74" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1943.472761] env[63379]: DEBUG nova.compute.manager [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Instance network_info: |[{"id": "7dc69df5-b3d2-494c-b700-584c31779f9a", "address": "fa:16:3e:b3:a2:43", "network": {"id": "678e0600-c1d0-4fb0-8219-a81a1ca0f4f0", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1244921057-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28f7e38c300546a2a7a033cb12c7f89a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1895250-76cc-41f7-b7f8-2e5679494607", "external-id": "nsx-vlan-transportzone-785", "segmentation_id": 785, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dc69df5-b3", "ovs_interfaceid": "7dc69df5-b3d2-494c-b700-584c31779f9a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1943.472922] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b3:a2:43', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a1895250-76cc-41f7-b7f8-2e5679494607', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7dc69df5-b3d2-494c-b700-584c31779f9a', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1943.480447] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Creating folder: Project (28f7e38c300546a2a7a033cb12c7f89a). Parent ref: group-v369214. 
{{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1943.480984] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6845778e-2f2c-4311-894c-5c658af8016d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.491921] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Created folder: Project (28f7e38c300546a2a7a033cb12c7f89a) in parent group-v369214. [ 1943.492123] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Creating folder: Instances. Parent ref: group-v369504. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1943.492347] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4d9c37d6-f9d4-42d2-91e8-a55c1fa72a87 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.501201] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Created folder: Instances in parent group-v369504. [ 1943.501426] env[63379]: DEBUG oslo.service.loopingcall [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1943.501605] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1943.501796] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-36437ea3-0a2f-4326-9974-f7b6c6ab9a9e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.519709] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1943.519709] env[63379]: value = "task-1780332" [ 1943.519709] env[63379]: _type = "Task" [ 1943.519709] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1943.526789] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780332, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.723312] env[63379]: DEBUG nova.compute.manager [req-bf6c74fd-2a50-40f1-af8b-158bd6d3437a req-941a2ab1-db82-4eb0-901f-0ff16efe6709 service nova] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Received event network-changed-7dc69df5-b3d2-494c-b700-584c31779f9a {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1943.723520] env[63379]: DEBUG nova.compute.manager [req-bf6c74fd-2a50-40f1-af8b-158bd6d3437a req-941a2ab1-db82-4eb0-901f-0ff16efe6709 service nova] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Refreshing instance network info cache due to event network-changed-7dc69df5-b3d2-494c-b700-584c31779f9a. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1943.723737] env[63379]: DEBUG oslo_concurrency.lockutils [req-bf6c74fd-2a50-40f1-af8b-158bd6d3437a req-941a2ab1-db82-4eb0-901f-0ff16efe6709 service nova] Acquiring lock "refresh_cache-10fc842d-b821-4103-b6a5-f5b2fc46ea74" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1943.723885] env[63379]: DEBUG oslo_concurrency.lockutils [req-bf6c74fd-2a50-40f1-af8b-158bd6d3437a req-941a2ab1-db82-4eb0-901f-0ff16efe6709 service nova] Acquired lock "refresh_cache-10fc842d-b821-4103-b6a5-f5b2fc46ea74" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1943.724063] env[63379]: DEBUG nova.network.neutron [req-bf6c74fd-2a50-40f1-af8b-158bd6d3437a req-941a2ab1-db82-4eb0-901f-0ff16efe6709 service nova] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Refreshing network info cache for port 7dc69df5-b3d2-494c-b700-584c31779f9a {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1943.875246] env[63379]: DEBUG oslo_vmware.api [None req-b5ab3775-1500-42a5-a4e6-0925d232a674 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780328, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.911621] env[63379]: DEBUG nova.compute.utils [None req-15e1b101-dae9-499e-a541-5d4859527db8 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1944.031051] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780332, 'name': CreateVM_Task, 'duration_secs': 0.507352} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1944.031200] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1944.031917] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1944.032115] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1944.032432] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1944.032681] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ceca036d-47f1-4851-a14e-4bdc558dbde7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.037158] env[63379]: DEBUG oslo_vmware.api [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Waiting for the task: (returnval){ [ 1944.037158] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]525b1b33-0a4d-d9af-5788-06f97123fd60" [ 1944.037158] env[63379]: _type = "Task" [ 1944.037158] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1944.044145] env[63379]: DEBUG oslo_vmware.api [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]525b1b33-0a4d-d9af-5788-06f97123fd60, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.375310] env[63379]: DEBUG oslo_vmware.api [None req-b5ab3775-1500-42a5-a4e6-0925d232a674 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780328, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.415014] env[63379]: DEBUG oslo_concurrency.lockutils [None req-15e1b101-dae9-499e-a541-5d4859527db8 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1944.424162] env[63379]: DEBUG nova.compute.manager [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1944.429660] env[63379]: DEBUG nova.network.neutron [req-bf6c74fd-2a50-40f1-af8b-158bd6d3437a req-941a2ab1-db82-4eb0-901f-0ff16efe6709 service nova] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Updated VIF entry in instance network info cache for port 7dc69df5-b3d2-494c-b700-584c31779f9a. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1944.430019] env[63379]: DEBUG nova.network.neutron [req-bf6c74fd-2a50-40f1-af8b-158bd6d3437a req-941a2ab1-db82-4eb0-901f-0ff16efe6709 service nova] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Updating instance_info_cache with network_info: [{"id": "7dc69df5-b3d2-494c-b700-584c31779f9a", "address": "fa:16:3e:b3:a2:43", "network": {"id": "678e0600-c1d0-4fb0-8219-a81a1ca0f4f0", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1244921057-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28f7e38c300546a2a7a033cb12c7f89a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1895250-76cc-41f7-b7f8-2e5679494607", "external-id": "nsx-vlan-transportzone-785", "segmentation_id": 785, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dc69df5-b3", "ovs_interfaceid": "7dc69df5-b3d2-494c-b700-584c31779f9a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1944.452987] env[63379]: DEBUG nova.virt.hardware [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1944.453262] env[63379]: DEBUG nova.virt.hardware [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1944.453425] env[63379]: DEBUG nova.virt.hardware [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1944.453613] env[63379]: DEBUG nova.virt.hardware [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1944.453761] env[63379]: DEBUG nova.virt.hardware [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1944.453909] env[63379]: DEBUG nova.virt.hardware [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1944.454141] env[63379]: DEBUG nova.virt.hardware [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1944.454310] env[63379]: DEBUG nova.virt.hardware [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1944.454476] env[63379]: DEBUG nova.virt.hardware [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1944.454641] env[63379]: DEBUG nova.virt.hardware [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1944.454815] 
env[63379]: DEBUG nova.virt.hardware [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1944.455903] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f298c421-7e89-4969-ba78-308ffe0e76cf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.463566] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-416277bd-cddc-4491-b09c-35d0dd233e0c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.547592] env[63379]: DEBUG oslo_vmware.api [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]525b1b33-0a4d-d9af-5788-06f97123fd60, 'name': SearchDatastore_Task, 'duration_secs': 0.009211} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1944.547927] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1944.548178] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1944.548418] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1944.548572] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1944.548754] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1944.549036] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-d83aef85-4eee-4e53-9f9e-8f03dcf7115d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.557234] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1944.557413] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1944.558152] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc7ba174-1c82-4142-8af7-8e5b1cf8d874 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.563334] env[63379]: DEBUG oslo_vmware.api [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Waiting for the task: (returnval){ [ 1944.563334] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e0cdf7-74b0-f3bf-7c90-f20463d1c1ef" [ 1944.563334] env[63379]: _type = "Task" [ 1944.563334] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1944.571223] env[63379]: DEBUG oslo_vmware.api [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e0cdf7-74b0-f3bf-7c90-f20463d1c1ef, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.620492] env[63379]: DEBUG nova.compute.manager [req-9567c417-1e22-4d6b-87da-1d460715e589 req-9d068b55-59e1-497e-bfda-3900504320aa service nova] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Received event network-vif-plugged-a482c861-81a4-437a-a78a-27d652a2e57d {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1944.620668] env[63379]: DEBUG oslo_concurrency.lockutils [req-9567c417-1e22-4d6b-87da-1d460715e589 req-9d068b55-59e1-497e-bfda-3900504320aa service nova] Acquiring lock "e1681d89-2f55-47b7-9962-55aa169b3d0a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1944.620851] env[63379]: DEBUG oslo_concurrency.lockutils [req-9567c417-1e22-4d6b-87da-1d460715e589 req-9d068b55-59e1-497e-bfda-3900504320aa service nova] Lock "e1681d89-2f55-47b7-9962-55aa169b3d0a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1944.621072] env[63379]: DEBUG oslo_concurrency.lockutils [req-9567c417-1e22-4d6b-87da-1d460715e589 req-9d068b55-59e1-497e-bfda-3900504320aa service nova] Lock "e1681d89-2f55-47b7-9962-55aa169b3d0a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1944.621266] env[63379]: DEBUG nova.compute.manager [req-9567c417-1e22-4d6b-87da-1d460715e589 req-9d068b55-59e1-497e-bfda-3900504320aa service nova] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] No waiting events found dispatching network-vif-plugged-a482c861-81a4-437a-a78a-27d652a2e57d {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1944.621441] env[63379]: WARNING nova.compute.manager [req-9567c417-1e22-4d6b-87da-1d460715e589 req-9d068b55-59e1-497e-bfda-3900504320aa service nova] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Received unexpected event network-vif-plugged-a482c861-81a4-437a-a78a-27d652a2e57d for instance with vm_state building and task_state spawning. [ 1944.701180] env[63379]: DEBUG nova.network.neutron [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Successfully updated port: a482c861-81a4-437a-a78a-27d652a2e57d {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1944.878550] env[63379]: DEBUG oslo_vmware.api [None req-b5ab3775-1500-42a5-a4e6-0925d232a674 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780328, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.933170] env[63379]: DEBUG oslo_concurrency.lockutils [req-bf6c74fd-2a50-40f1-af8b-158bd6d3437a req-941a2ab1-db82-4eb0-901f-0ff16efe6709 service nova] Releasing lock "refresh_cache-10fc842d-b821-4103-b6a5-f5b2fc46ea74" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1945.074997] env[63379]: DEBUG oslo_vmware.api [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e0cdf7-74b0-f3bf-7c90-f20463d1c1ef, 'name': SearchDatastore_Task, 'duration_secs': 0.009611} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1945.075653] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ae7b863-0a98-4e20-a591-bd4e590a4fe7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.082026] env[63379]: DEBUG oslo_vmware.api [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Waiting for the task: (returnval){ [ 1945.082026] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52eaf499-1f28-092b-87be-3226d53e6d2d" [ 1945.082026] env[63379]: _type = "Task" [ 1945.082026] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1945.091996] env[63379]: DEBUG oslo_vmware.api [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52eaf499-1f28-092b-87be-3226d53e6d2d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1945.204164] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "refresh_cache-e1681d89-2f55-47b7-9962-55aa169b3d0a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1945.204334] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquired lock "refresh_cache-e1681d89-2f55-47b7-9962-55aa169b3d0a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1945.204541] env[63379]: DEBUG nova.network.neutron [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1945.376997] env[63379]: DEBUG oslo_vmware.api [None req-b5ab3775-1500-42a5-a4e6-0925d232a674 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780328, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1945.478683] env[63379]: DEBUG oslo_concurrency.lockutils [None req-15e1b101-dae9-499e-a541-5d4859527db8 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquiring lock "1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1945.479245] env[63379]: DEBUG oslo_concurrency.lockutils [None req-15e1b101-dae9-499e-a541-5d4859527db8 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1945.479245] env[63379]: INFO nova.compute.manager [None req-15e1b101-dae9-499e-a541-5d4859527db8 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Attaching volume 486e6792-4026-4427-b9d1-876825eb94eb to /dev/sdb [ 1945.513340] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ea8225d-2278-450c-9d56-2bc24ffcf2d3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.518206] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a0359d2-19e5-4481-bf0f-abde6e9ee2e0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.539871] env[63379]: DEBUG nova.virt.block_device [None req-15e1b101-dae9-499e-a541-5d4859527db8 tempest-AttachVolumeNegativeTest-580872197 
tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Updating existing volume attachment record: 632a07b5-7c14-4f1e-858d-3bd379ef9ba5 {{(pid=63379) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1945.593064] env[63379]: DEBUG oslo_vmware.api [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52eaf499-1f28-092b-87be-3226d53e6d2d, 'name': SearchDatastore_Task, 'duration_secs': 0.010506} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1945.593064] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1945.593064] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 10fc842d-b821-4103-b6a5-f5b2fc46ea74/10fc842d-b821-4103-b6a5-f5b2fc46ea74.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1945.593332] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-09021b98-52d6-4c6e-b694-dcc56274012c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.599345] env[63379]: DEBUG oslo_vmware.api [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Waiting for the task: (returnval){ [ 1945.599345] env[63379]: value = "task-1780334" [ 1945.599345] env[63379]: _type = "Task" [ 1945.599345] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1945.606942] env[63379]: DEBUG oslo_vmware.api [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780334, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1945.738314] env[63379]: DEBUG nova.network.neutron [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1945.877857] env[63379]: DEBUG oslo_vmware.api [None req-b5ab3775-1500-42a5-a4e6-0925d232a674 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780328, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1945.890377] env[63379]: DEBUG nova.network.neutron [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Updating instance_info_cache with network_info: [{"id": "a482c861-81a4-437a-a78a-27d652a2e57d", "address": "fa:16:3e:3a:8d:40", "network": {"id": "867cf8d8-4bba-4306-ad6d-632c9dc6863d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-777715300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a3363a90de2d4d5988ddd03974c10d0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "459b8c74-0aa6-42b6-996a-42b1c5d7e5c6", "external-id": "nsx-vlan-transportzone-467", "segmentation_id": 467, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa482c861-81", "ovs_interfaceid": "a482c861-81a4-437a-a78a-27d652a2e57d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1946.109921] env[63379]: DEBUG oslo_vmware.api [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780334, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.379598] env[63379]: DEBUG oslo_vmware.api [None req-b5ab3775-1500-42a5-a4e6-0925d232a674 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780328, 'name': ReconfigVM_Task, 'duration_secs': 6.185108} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1946.379926] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b5ab3775-1500-42a5-a4e6-0925d232a674 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Releasing lock "48c17c3b-1197-46cb-a0f7-3671b2d82c7e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1946.380104] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b5ab3775-1500-42a5-a4e6-0925d232a674 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Reconfigured VM to detach interface {{(pid=63379) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1946.392891] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Releasing lock "refresh_cache-e1681d89-2f55-47b7-9962-55aa169b3d0a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1946.393207] env[63379]: DEBUG nova.compute.manager [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Instance network_info: |[{"id": "a482c861-81a4-437a-a78a-27d652a2e57d", "address": "fa:16:3e:3a:8d:40", "network": {"id": "867cf8d8-4bba-4306-ad6d-632c9dc6863d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-777715300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a3363a90de2d4d5988ddd03974c10d0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "459b8c74-0aa6-42b6-996a-42b1c5d7e5c6", "external-id": "nsx-vlan-transportzone-467", "segmentation_id": 467, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa482c861-81", "ovs_interfaceid": "a482c861-81a4-437a-a78a-27d652a2e57d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1946.393670] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3a:8d:40', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '459b8c74-0aa6-42b6-996a-42b1c5d7e5c6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a482c861-81a4-437a-a78a-27d652a2e57d', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1946.401283] env[63379]: DEBUG oslo.service.loopingcall [None req-9df85064-7169-4a1c-bf36-bda19d399695 
tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1946.401520] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1946.401728] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9a57b3f8-251d-47d0-b0c6-b864001e822d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.423823] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1946.423823] env[63379]: value = "task-1780336" [ 1946.423823] env[63379]: _type = "Task" [ 1946.423823] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1946.431773] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780336, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.610458] env[63379]: DEBUG oslo_vmware.api [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780334, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.926848} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1946.610727] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 10fc842d-b821-4103-b6a5-f5b2fc46ea74/10fc842d-b821-4103-b6a5-f5b2fc46ea74.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1946.610969] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1946.611250] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-da5c07f4-2ac3-464c-a03b-c9adcdc7e000 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.617567] env[63379]: DEBUG oslo_vmware.api [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Waiting for the task: (returnval){ [ 1946.617567] env[63379]: value = "task-1780337" [ 1946.617567] env[63379]: _type = "Task" [ 1946.617567] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1946.625247] env[63379]: DEBUG oslo_vmware.api [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780337, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.650386] env[63379]: DEBUG nova.compute.manager [req-2a27f550-daad-4af9-987c-257388669a9d req-657d6f08-d25b-4162-a5de-980de09e06c6 service nova] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Received event network-changed-a482c861-81a4-437a-a78a-27d652a2e57d {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1946.650590] env[63379]: DEBUG nova.compute.manager [req-2a27f550-daad-4af9-987c-257388669a9d req-657d6f08-d25b-4162-a5de-980de09e06c6 service nova] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Refreshing instance network info cache due to event network-changed-a482c861-81a4-437a-a78a-27d652a2e57d. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1946.650805] env[63379]: DEBUG oslo_concurrency.lockutils [req-2a27f550-daad-4af9-987c-257388669a9d req-657d6f08-d25b-4162-a5de-980de09e06c6 service nova] Acquiring lock "refresh_cache-e1681d89-2f55-47b7-9962-55aa169b3d0a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1946.650950] env[63379]: DEBUG oslo_concurrency.lockutils [req-2a27f550-daad-4af9-987c-257388669a9d req-657d6f08-d25b-4162-a5de-980de09e06c6 service nova] Acquired lock "refresh_cache-e1681d89-2f55-47b7-9962-55aa169b3d0a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1946.651129] env[63379]: DEBUG nova.network.neutron [req-2a27f550-daad-4af9-987c-257388669a9d req-657d6f08-d25b-4162-a5de-980de09e06c6 service nova] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Refreshing network info cache for port a482c861-81a4-437a-a78a-27d652a2e57d {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1946.933371] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780336, 'name': CreateVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1947.127151] env[63379]: DEBUG oslo_vmware.api [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780337, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06232} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1947.127495] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1947.128319] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b426167e-e868-4a57-bfa5-849126065d97 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.150397] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] 10fc842d-b821-4103-b6a5-f5b2fc46ea74/10fc842d-b821-4103-b6a5-f5b2fc46ea74.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1947.150653] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-36b5e8b9-99e8-4d6f-88a9-b3486f516553 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.171484] env[63379]: DEBUG oslo_vmware.api [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Waiting for the task: (returnval){ [ 1947.171484] env[63379]: value = "task-1780338" [ 1947.171484] env[63379]: _type = "Task" [ 1947.171484] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1947.180133] env[63379]: DEBUG oslo_vmware.api [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780338, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1947.436112] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780336, 'name': CreateVM_Task, 'duration_secs': 0.512541} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1947.436372] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1947.438864] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1947.438864] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1947.438864] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1947.438864] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6dfd9644-dbdd-4c0c-9626-df16cc6ebe95 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.443626] env[63379]: DEBUG oslo_vmware.api [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1947.443626] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e7b6cc-29aa-6ea3-be78-7b5b3dbb9010" [ 1947.443626] env[63379]: _type = "Task" [ 1947.443626] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1947.452173] env[63379]: DEBUG oslo_vmware.api [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e7b6cc-29aa-6ea3-be78-7b5b3dbb9010, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1947.453072] env[63379]: DEBUG nova.network.neutron [req-2a27f550-daad-4af9-987c-257388669a9d req-657d6f08-d25b-4162-a5de-980de09e06c6 service nova] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Updated VIF entry in instance network info cache for port a482c861-81a4-437a-a78a-27d652a2e57d. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1947.453461] env[63379]: DEBUG nova.network.neutron [req-2a27f550-daad-4af9-987c-257388669a9d req-657d6f08-d25b-4162-a5de-980de09e06c6 service nova] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Updating instance_info_cache with network_info: [{"id": "a482c861-81a4-437a-a78a-27d652a2e57d", "address": "fa:16:3e:3a:8d:40", "network": {"id": "867cf8d8-4bba-4306-ad6d-632c9dc6863d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-777715300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a3363a90de2d4d5988ddd03974c10d0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "459b8c74-0aa6-42b6-996a-42b1c5d7e5c6", "external-id": "nsx-vlan-transportzone-467", "segmentation_id": 467, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa482c861-81", "ovs_interfaceid": "a482c861-81a4-437a-a78a-27d652a2e57d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1947.681527] env[63379]: DEBUG oslo_vmware.api [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780338, 'name': ReconfigVM_Task, 'duration_secs': 0.260318} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1947.681829] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Reconfigured VM instance instance-0000006d to attach disk [datastore1] 10fc842d-b821-4103-b6a5-f5b2fc46ea74/10fc842d-b821-4103-b6a5-f5b2fc46ea74.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1947.682845] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8ba36528-1481-427b-894b-03418a682981 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.689647] env[63379]: DEBUG oslo_vmware.api [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Waiting for the task: (returnval){ [ 1947.689647] env[63379]: value = "task-1780339" [ 1947.689647] env[63379]: _type = "Task" [ 1947.689647] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1947.697225] env[63379]: DEBUG oslo_vmware.api [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780339, 'name': Rename_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1947.702730] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b5ab3775-1500-42a5-a4e6-0925d232a674 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "refresh_cache-48c17c3b-1197-46cb-a0f7-3671b2d82c7e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1947.702904] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b5ab3775-1500-42a5-a4e6-0925d232a674 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquired lock "refresh_cache-48c17c3b-1197-46cb-a0f7-3671b2d82c7e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1947.703231] env[63379]: DEBUG nova.network.neutron [None req-b5ab3775-1500-42a5-a4e6-0925d232a674 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1947.957426] env[63379]: DEBUG oslo_concurrency.lockutils [req-2a27f550-daad-4af9-987c-257388669a9d req-657d6f08-d25b-4162-a5de-980de09e06c6 service nova] Releasing lock "refresh_cache-e1681d89-2f55-47b7-9962-55aa169b3d0a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1947.958865] env[63379]: DEBUG oslo_vmware.api [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e7b6cc-29aa-6ea3-be78-7b5b3dbb9010, 'name': SearchDatastore_Task, 'duration_secs': 0.010197} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1947.959323] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1947.959557] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1947.959794] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1947.959943] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1947.960141] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1947.960411] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2b20d1f3-df9c-459b-8300-0ffbcc6c3d7b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.988642] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1947.988899] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1947.989596] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f458adb9-b4f5-487b-b776-faa53444fa75 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.997325] env[63379]: DEBUG oslo_vmware.api [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1947.997325] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]526db152-69eb-2a85-32f1-9493346adef7" [ 1947.997325] env[63379]: _type = "Task" [ 1947.997325] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1948.005590] env[63379]: DEBUG oslo_vmware.api [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]526db152-69eb-2a85-32f1-9493346adef7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1948.055796] env[63379]: DEBUG nova.compute.manager [req-5c305deb-f19d-4f1b-b352-77c989dfd8b3 req-fa1eef42-4805-4a28-ba71-19a7293940b4 service nova] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Received event network-changed-4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1948.056009] env[63379]: DEBUG nova.compute.manager [req-5c305deb-f19d-4f1b-b352-77c989dfd8b3 req-fa1eef42-4805-4a28-ba71-19a7293940b4 service nova] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Refreshing instance network info cache due to event network-changed-4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1948.056242] env[63379]: DEBUG oslo_concurrency.lockutils [req-5c305deb-f19d-4f1b-b352-77c989dfd8b3 req-fa1eef42-4805-4a28-ba71-19a7293940b4 service nova] Acquiring lock "refresh_cache-48c17c3b-1197-46cb-a0f7-3671b2d82c7e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1948.200862] env[63379]: DEBUG oslo_vmware.api [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780339, 'name': Rename_Task, 'duration_secs': 0.14088} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1948.201228] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1948.201572] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-03c4e247-ab9a-46b4-bbf6-77eaa3a7d2aa {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.208412] env[63379]: DEBUG oslo_vmware.api [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Waiting for the task: (returnval){ [ 1948.208412] env[63379]: value = "task-1780341" [ 1948.208412] env[63379]: _type = "Task" [ 1948.208412] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1948.216544] env[63379]: DEBUG oslo_vmware.api [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780341, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1948.435592] env[63379]: INFO nova.network.neutron [None req-b5ab3775-1500-42a5-a4e6-0925d232a674 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Port 44cd89ca-ba87-42ee-bfba-e868680926c7 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
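The oslo_vmware.api entries above all follow one pattern: a vSphere task is submitted (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, CreateVM_Task, Rename_Task, PowerOnVM_Task), then polled ("Waiting for the task ... to complete", "progress is N%") until it reports completion and its duration_secs is logged. The snippet below is a minimal, self-contained sketch of that polling loop, assuming a toy FakeTask object; the names FakeTask and wait_for_task, the interval, and the timeout are illustrative stand-ins, not the actual oslo.vmware API.

import time

class FakeTask:
    """Illustrative stand-in for a vSphere task handle (not the real API)."""
    def __init__(self, name, steps=4):
        self.name = name
        self._progress = 0
        self._step = max(1, 100 // steps)

    def poll(self):
        # A real driver would query vCenter for task progress; the fake just advances.
        self._progress = min(100, self._progress + self._step)
        return self._progress


def wait_for_task(task, interval=0.5, timeout=60.0):
    """Poll until the task reports 100%, mirroring the log's
    'progress is N%' / 'completed successfully' sequence."""
    start = time.monotonic()
    while True:
        progress = task.poll()
        print(f"Task {task.name!r} progress is {progress}%.")
        if progress >= 100:
            duration = time.monotonic() - start
            print(f"Task {task.name!r} completed successfully "
                  f"(duration_secs={duration:.6f}).")
            return
        if time.monotonic() - start > timeout:
            raise TimeoutError(f"Task {task.name!r} did not complete in {timeout}s")
        time.sleep(interval)


if __name__ == "__main__":
    wait_for_task(FakeTask("CopyVirtualDisk_Task"), interval=0.1)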
[ 1948.435996] env[63379]: DEBUG nova.network.neutron [None req-b5ab3775-1500-42a5-a4e6-0925d232a674 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Updating instance_info_cache with network_info: [{"id": "4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191", "address": "fa:16:3e:c6:a7:ff", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b8a4c15-27", "ovs_interfaceid": "4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1948.509791] env[63379]: DEBUG oslo_vmware.api [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]526db152-69eb-2a85-32f1-9493346adef7, 'name': SearchDatastore_Task, 'duration_secs': 0.027365} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1948.511020] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89c2d04b-4d07-44cb-8ec4-8a05681e4b44 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.517218] env[63379]: DEBUG oslo_vmware.api [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1948.517218] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52119d73-f3e7-2965-163b-faf62ad40c79" [ 1948.517218] env[63379]: _type = "Task" [ 1948.517218] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1948.525880] env[63379]: DEBUG oslo_vmware.api [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52119d73-f3e7-2965-163b-faf62ad40c79, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1948.718241] env[63379]: DEBUG oslo_vmware.api [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780341, 'name': PowerOnVM_Task, 'duration_secs': 0.46539} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1948.718523] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1948.718726] env[63379]: INFO nova.compute.manager [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Took 6.85 seconds to spawn the instance on the hypervisor. [ 1948.718933] env[63379]: DEBUG nova.compute.manager [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1948.719734] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e554f534-e143-4498-9395-1a6ef4c4157b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.764566] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4d36cce2-8644-49be-91af-4f23a5e7c677 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "interface-d4988643-18ff-44c8-8363-e0de43da2abe-44cd89ca-ba87-42ee-bfba-e868680926c7" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1948.764861] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4d36cce2-8644-49be-91af-4f23a5e7c677 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "interface-d4988643-18ff-44c8-8363-e0de43da2abe-44cd89ca-ba87-42ee-bfba-e868680926c7" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1948.765705] env[63379]: DEBUG nova.objects.instance [None req-4d36cce2-8644-49be-91af-4f23a5e7c677 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lazy-loading 'flavor' on Instance uuid d4988643-18ff-44c8-8363-e0de43da2abe {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1948.941768] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b5ab3775-1500-42a5-a4e6-0925d232a674 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Releasing lock "refresh_cache-48c17c3b-1197-46cb-a0f7-3671b2d82c7e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1948.944106] 
env[63379]: DEBUG oslo_concurrency.lockutils [req-5c305deb-f19d-4f1b-b352-77c989dfd8b3 req-fa1eef42-4805-4a28-ba71-19a7293940b4 service nova] Acquired lock "refresh_cache-48c17c3b-1197-46cb-a0f7-3671b2d82c7e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1948.944383] env[63379]: DEBUG nova.network.neutron [req-5c305deb-f19d-4f1b-b352-77c989dfd8b3 req-fa1eef42-4805-4a28-ba71-19a7293940b4 service nova] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Refreshing network info cache for port 4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1949.028629] env[63379]: DEBUG oslo_vmware.api [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52119d73-f3e7-2965-163b-faf62ad40c79, 'name': SearchDatastore_Task, 'duration_secs': 0.017351} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1949.028951] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1949.029241] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] e1681d89-2f55-47b7-9962-55aa169b3d0a/e1681d89-2f55-47b7-9962-55aa169b3d0a.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1949.029511] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3326b2e6-4a54-4e55-bd61-672177d287d5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.039617] env[63379]: DEBUG oslo_vmware.api [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1949.039617] env[63379]: value = "task-1780342" [ 1949.039617] env[63379]: _type = "Task" [ 1949.039617] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1949.047105] env[63379]: DEBUG oslo_vmware.api [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780342, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1949.239132] env[63379]: INFO nova.compute.manager [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Took 13.67 seconds to build instance. 
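The oslo_concurrency.lockutils lines in this stretch (Acquiring lock "[datastore1] devstack-image-cache_base/...vmdk", acquired :: waited 0.000s, "released" :: held N.NNNs) show named per-resource locks that report how long the caller waited and how long it held the lock; serializing on the cached image path is what keeps the two concurrent builds here from copying the same base VMDK at the same time. The following is a simplified, standard-library approximation of that pattern for illustration only; named_lock and _LOCKS are hypothetical names, and the real code path uses oslo.concurrency's lockutils rather than this sketch.

import contextlib
import threading
import time
from collections import defaultdict

# One lock per resource name, e.g. the cached image path
# "[datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk".
_LOCKS = defaultdict(threading.Lock)  # hypothetical registry, illustration only


@contextlib.contextmanager
def named_lock(name, owner):
    """Acquire the lock for `name`, reporting wait and hold times the way
    the lockutils log lines above do (simplified stand-in, not oslo code)."""
    lock = _LOCKS[name]
    print(f'Acquiring lock "{name}" by "{owner}"')
    waited_from = time.monotonic()
    lock.acquire()
    acquired_at = time.monotonic()
    print(f'Lock "{name}" acquired by "{owner}" :: '
          f'waited {acquired_at - waited_from:.3f}s')
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" "released" by "{owner}" :: '
              f'held {time.monotonic() - acquired_at:.3f}s')


if __name__ == "__main__":
    cache_vmdk = "[datastore1] devstack-image-cache_base/example.vmdk"
    with named_lock(cache_vmdk, "_fetch_image_if_missing"):
        time.sleep(0.1)  # stand-in for: check cache, copy VMDK, extend root disk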
[ 1949.384252] env[63379]: DEBUG nova.objects.instance [None req-4d36cce2-8644-49be-91af-4f23a5e7c677 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lazy-loading 'pci_requests' on Instance uuid d4988643-18ff-44c8-8363-e0de43da2abe {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1949.447215] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b5ab3775-1500-42a5-a4e6-0925d232a674 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "interface-48c17c3b-1197-46cb-a0f7-3671b2d82c7e-44cd89ca-ba87-42ee-bfba-e868680926c7" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.329s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1949.554020] env[63379]: DEBUG oslo_vmware.api [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780342, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1949.713773] env[63379]: DEBUG nova.network.neutron [req-5c305deb-f19d-4f1b-b352-77c989dfd8b3 req-fa1eef42-4805-4a28-ba71-19a7293940b4 service nova] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Updated VIF entry in instance network info cache for port 4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1949.714160] env[63379]: DEBUG nova.network.neutron [req-5c305deb-f19d-4f1b-b352-77c989dfd8b3 req-fa1eef42-4805-4a28-ba71-19a7293940b4 service nova] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Updating instance_info_cache with network_info: [{"id": "4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191", "address": "fa:16:3e:c6:a7:ff", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b8a4c15-27", "ovs_interfaceid": "4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1949.740957] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ba93284e-d05d-44c9-828a-ecbc7b6c1282 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Lock "10fc842d-b821-4103-b6a5-f5b2fc46ea74" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.179s {{(pid=63379) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1949.887367] env[63379]: DEBUG nova.objects.base [None req-4d36cce2-8644-49be-91af-4f23a5e7c677 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=63379) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1949.887600] env[63379]: DEBUG nova.network.neutron [None req-4d36cce2-8644-49be-91af-4f23a5e7c677 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1949.961711] env[63379]: DEBUG nova.policy [None req-4d36cce2-8644-49be-91af-4f23a5e7c677 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5cbf26808a73470898829b58491e7c6f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'edb0d4b37a67492f9e0275b341e80cc2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1950.051210] env[63379]: DEBUG oslo_vmware.api [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780342, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.853804} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1950.051529] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] e1681d89-2f55-47b7-9962-55aa169b3d0a/e1681d89-2f55-47b7-9962-55aa169b3d0a.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1950.051712] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1950.052114] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bc9859cf-60c3-4e6a-86f8-9da2fdd14a29 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.059980] env[63379]: DEBUG oslo_vmware.api [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1950.059980] env[63379]: value = "task-1780343" [ 1950.059980] env[63379]: _type = "Task" [ 1950.059980] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1950.070630] env[63379]: DEBUG oslo_vmware.api [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780343, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1950.085582] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-15e1b101-dae9-499e-a541-5d4859527db8 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Volume attach. Driver type: vmdk {{(pid=63379) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1950.085845] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-15e1b101-dae9-499e-a541-5d4859527db8 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369507', 'volume_id': '486e6792-4026-4427-b9d1-876825eb94eb', 'name': 'volume-486e6792-4026-4427-b9d1-876825eb94eb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6', 'attached_at': '', 'detached_at': '', 'volume_id': '486e6792-4026-4427-b9d1-876825eb94eb', 'serial': '486e6792-4026-4427-b9d1-876825eb94eb'} {{(pid=63379) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1950.087328] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46eceeee-3ca1-4ec9-8efe-31a10e97e502 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.108069] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af6f4dfe-d373-4aff-ae49-0a8bbbdd49bb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.133389] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-15e1b101-dae9-499e-a541-5d4859527db8 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] volume-486e6792-4026-4427-b9d1-876825eb94eb/volume-486e6792-4026-4427-b9d1-876825eb94eb.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1950.133687] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-364398e7-0121-4813-87fa-a6d8deaea0fd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.151981] env[63379]: DEBUG oslo_vmware.api [None req-15e1b101-dae9-499e-a541-5d4859527db8 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 1950.151981] env[63379]: value = "task-1780344" [ 1950.151981] env[63379]: _type = "Task" [ 1950.151981] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1950.160591] env[63379]: DEBUG oslo_vmware.api [None req-15e1b101-dae9-499e-a541-5d4859527db8 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780344, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1950.217477] env[63379]: DEBUG oslo_concurrency.lockutils [req-5c305deb-f19d-4f1b-b352-77c989dfd8b3 req-fa1eef42-4805-4a28-ba71-19a7293940b4 service nova] Releasing lock "refresh_cache-48c17c3b-1197-46cb-a0f7-3671b2d82c7e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1950.366328] env[63379]: DEBUG nova.compute.manager [req-fb98ff89-5b3d-424c-95b0-5be13a1b0e9d req-6c12ab27-2588-49c3-8e93-3d9331cf6c66 service nova] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Received event network-changed-41bdc6f8-c059-49a5-86a4-a7a03cfe0300 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1950.366543] env[63379]: DEBUG nova.compute.manager [req-fb98ff89-5b3d-424c-95b0-5be13a1b0e9d req-6c12ab27-2588-49c3-8e93-3d9331cf6c66 service nova] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Refreshing instance network info cache due to event network-changed-41bdc6f8-c059-49a5-86a4-a7a03cfe0300. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1950.366777] env[63379]: DEBUG oslo_concurrency.lockutils [req-fb98ff89-5b3d-424c-95b0-5be13a1b0e9d req-6c12ab27-2588-49c3-8e93-3d9331cf6c66 service nova] Acquiring lock "refresh_cache-d4988643-18ff-44c8-8363-e0de43da2abe" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1950.366929] env[63379]: DEBUG oslo_concurrency.lockutils [req-fb98ff89-5b3d-424c-95b0-5be13a1b0e9d req-6c12ab27-2588-49c3-8e93-3d9331cf6c66 service nova] Acquired lock "refresh_cache-d4988643-18ff-44c8-8363-e0de43da2abe" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1950.367213] env[63379]: DEBUG nova.network.neutron [req-fb98ff89-5b3d-424c-95b0-5be13a1b0e9d req-6c12ab27-2588-49c3-8e93-3d9331cf6c66 service nova] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Refreshing network info cache for port 41bdc6f8-c059-49a5-86a4-a7a03cfe0300 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1950.570069] env[63379]: DEBUG oslo_vmware.api [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780343, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.135677} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1950.570069] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1950.570809] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-711665c9-7393-4cd0-9946-5f3eaf04c71b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.592939] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Reconfiguring VM instance instance-0000006e to attach disk [datastore1] e1681d89-2f55-47b7-9962-55aa169b3d0a/e1681d89-2f55-47b7-9962-55aa169b3d0a.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1950.593303] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-80d5bebd-3787-4c42-bc77-ff3b30d91555 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.611928] env[63379]: DEBUG oslo_vmware.api [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1950.611928] env[63379]: value = "task-1780345" [ 1950.611928] env[63379]: _type = "Task" [ 1950.611928] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1950.621170] env[63379]: DEBUG oslo_vmware.api [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780345, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1950.662138] env[63379]: DEBUG oslo_vmware.api [None req-15e1b101-dae9-499e-a541-5d4859527db8 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780344, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1950.951708] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Acquiring lock "2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1950.951950] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Lock "2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1951.102996] env[63379]: DEBUG nova.network.neutron [req-fb98ff89-5b3d-424c-95b0-5be13a1b0e9d req-6c12ab27-2588-49c3-8e93-3d9331cf6c66 service nova] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Updated VIF entry in instance network info cache for port 41bdc6f8-c059-49a5-86a4-a7a03cfe0300. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1951.103425] env[63379]: DEBUG nova.network.neutron [req-fb98ff89-5b3d-424c-95b0-5be13a1b0e9d req-6c12ab27-2588-49c3-8e93-3d9331cf6c66 service nova] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Updating instance_info_cache with network_info: [{"id": "41bdc6f8-c059-49a5-86a4-a7a03cfe0300", "address": "fa:16:3e:25:56:5d", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.211", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41bdc6f8-c0", "ovs_interfaceid": "41bdc6f8-c059-49a5-86a4-a7a03cfe0300", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1951.122510] env[63379]: DEBUG oslo_vmware.api [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780345, 'name': ReconfigVM_Task, 'duration_secs': 0.480632} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1951.122776] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Reconfigured VM instance instance-0000006e to attach disk [datastore1] e1681d89-2f55-47b7-9962-55aa169b3d0a/e1681d89-2f55-47b7-9962-55aa169b3d0a.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1951.123391] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9d8e77b1-c1b1-499e-84fd-1ec3e0bf96e7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.128870] env[63379]: DEBUG oslo_vmware.api [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1951.128870] env[63379]: value = "task-1780346" [ 1951.128870] env[63379]: _type = "Task" [ 1951.128870] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1951.136550] env[63379]: DEBUG oslo_vmware.api [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780346, 'name': Rename_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1951.162433] env[63379]: DEBUG oslo_vmware.api [None req-15e1b101-dae9-499e-a541-5d4859527db8 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780344, 'name': ReconfigVM_Task, 'duration_secs': 0.990388} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1951.162725] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-15e1b101-dae9-499e-a541-5d4859527db8 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Reconfigured VM instance instance-00000067 to attach disk [datastore1] volume-486e6792-4026-4427-b9d1-876825eb94eb/volume-486e6792-4026-4427-b9d1-876825eb94eb.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1951.167475] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-89dae602-3b1a-499c-8ed7-9a9397865da0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.182291] env[63379]: DEBUG oslo_vmware.api [None req-15e1b101-dae9-499e-a541-5d4859527db8 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 1951.182291] env[63379]: value = "task-1780347" [ 1951.182291] env[63379]: _type = "Task" [ 1951.182291] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1951.190528] env[63379]: DEBUG oslo_vmware.api [None req-15e1b101-dae9-499e-a541-5d4859527db8 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780347, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1951.455221] env[63379]: DEBUG nova.compute.manager [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1951.584188] env[63379]: DEBUG nova.network.neutron [None req-4d36cce2-8644-49be-91af-4f23a5e7c677 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Successfully updated port: 44cd89ca-ba87-42ee-bfba-e868680926c7 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1951.600017] env[63379]: DEBUG nova.compute.manager [req-503ed81e-2590-4eac-ad2a-d7a6e461b37c req-b6ff2a4d-df57-4b7c-9589-8ae286d670b5 service nova] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Received event network-vif-plugged-44cd89ca-ba87-42ee-bfba-e868680926c7 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1951.600278] env[63379]: DEBUG oslo_concurrency.lockutils [req-503ed81e-2590-4eac-ad2a-d7a6e461b37c req-b6ff2a4d-df57-4b7c-9589-8ae286d670b5 service nova] Acquiring lock "d4988643-18ff-44c8-8363-e0de43da2abe-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1951.600515] env[63379]: DEBUG oslo_concurrency.lockutils [req-503ed81e-2590-4eac-ad2a-d7a6e461b37c req-b6ff2a4d-df57-4b7c-9589-8ae286d670b5 service nova] Lock "d4988643-18ff-44c8-8363-e0de43da2abe-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1951.600691] env[63379]: DEBUG oslo_concurrency.lockutils [req-503ed81e-2590-4eac-ad2a-d7a6e461b37c req-b6ff2a4d-df57-4b7c-9589-8ae286d670b5 service nova] Lock "d4988643-18ff-44c8-8363-e0de43da2abe-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1951.600859] env[63379]: DEBUG nova.compute.manager [req-503ed81e-2590-4eac-ad2a-d7a6e461b37c req-b6ff2a4d-df57-4b7c-9589-8ae286d670b5 service nova] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] No waiting events found dispatching network-vif-plugged-44cd89ca-ba87-42ee-bfba-e868680926c7 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1951.601041] env[63379]: WARNING nova.compute.manager [req-503ed81e-2590-4eac-ad2a-d7a6e461b37c req-b6ff2a4d-df57-4b7c-9589-8ae286d670b5 service nova] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Received unexpected event network-vif-plugged-44cd89ca-ba87-42ee-bfba-e868680926c7 for instance with vm_state active and task_state None. 
[ 1951.606377] env[63379]: DEBUG oslo_concurrency.lockutils [req-fb98ff89-5b3d-424c-95b0-5be13a1b0e9d req-6c12ab27-2588-49c3-8e93-3d9331cf6c66 service nova] Releasing lock "refresh_cache-d4988643-18ff-44c8-8363-e0de43da2abe" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1951.638935] env[63379]: DEBUG oslo_vmware.api [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780346, 'name': Rename_Task, 'duration_secs': 0.151911} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1951.639236] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1951.639484] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f425aea1-759c-4123-be9c-e50069c12338 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.645658] env[63379]: DEBUG oslo_vmware.api [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1951.645658] env[63379]: value = "task-1780348" [ 1951.645658] env[63379]: _type = "Task" [ 1951.645658] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1951.653256] env[63379]: DEBUG oslo_vmware.api [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780348, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1951.692208] env[63379]: DEBUG oslo_vmware.api [None req-15e1b101-dae9-499e-a541-5d4859527db8 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780347, 'name': ReconfigVM_Task, 'duration_secs': 0.177661} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1951.692512] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-15e1b101-dae9-499e-a541-5d4859527db8 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369507', 'volume_id': '486e6792-4026-4427-b9d1-876825eb94eb', 'name': 'volume-486e6792-4026-4427-b9d1-876825eb94eb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6', 'attached_at': '', 'detached_at': '', 'volume_id': '486e6792-4026-4427-b9d1-876825eb94eb', 'serial': '486e6792-4026-4427-b9d1-876825eb94eb'} {{(pid=63379) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1951.978532] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1951.978804] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1951.980349] env[63379]: INFO nova.compute.claims [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1952.086409] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4d36cce2-8644-49be-91af-4f23a5e7c677 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "refresh_cache-d4988643-18ff-44c8-8363-e0de43da2abe" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1952.086536] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4d36cce2-8644-49be-91af-4f23a5e7c677 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquired lock "refresh_cache-d4988643-18ff-44c8-8363-e0de43da2abe" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1952.086706] env[63379]: DEBUG nova.network.neutron [None req-4d36cce2-8644-49be-91af-4f23a5e7c677 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1952.154924] env[63379]: DEBUG oslo_vmware.api [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780348, 'name': PowerOnVM_Task, 'duration_secs': 0.462558} completed 
successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1952.155269] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1952.155433] env[63379]: INFO nova.compute.manager [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Took 7.73 seconds to spawn the instance on the hypervisor. [ 1952.155613] env[63379]: DEBUG nova.compute.manager [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1952.156376] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5193699b-167d-4b34-a665-364430ac765c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.621239] env[63379]: WARNING nova.network.neutron [None req-4d36cce2-8644-49be-91af-4f23a5e7c677 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] 501025fb-aee7-4f74-80fd-af4976529317 already exists in list: networks containing: ['501025fb-aee7-4f74-80fd-af4976529317']. ignoring it [ 1952.673913] env[63379]: INFO nova.compute.manager [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Took 13.66 seconds to build instance. 
[ 1952.730412] env[63379]: DEBUG nova.objects.instance [None req-15e1b101-dae9-499e-a541-5d4859527db8 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lazy-loading 'flavor' on Instance uuid 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1952.906080] env[63379]: DEBUG nova.network.neutron [None req-4d36cce2-8644-49be-91af-4f23a5e7c677 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Updating instance_info_cache with network_info: [{"id": "41bdc6f8-c059-49a5-86a4-a7a03cfe0300", "address": "fa:16:3e:25:56:5d", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.211", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41bdc6f8-c0", "ovs_interfaceid": "41bdc6f8-c059-49a5-86a4-a7a03cfe0300", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "44cd89ca-ba87-42ee-bfba-e868680926c7", "address": "fa:16:3e:51:f5:6a", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44cd89ca-ba", "ovs_interfaceid": "44cd89ca-ba87-42ee-bfba-e868680926c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1952.976174] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2c00fdc9-4352-4f92-8486-4fb041b16d08 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquiring lock "1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63379) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1953.110196] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ff1d00b-97ff-4354-a6dd-b70548c9ec9f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.118059] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ead66bd-cf84-4cd1-bbfc-4f910878c8b4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.147180] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ea7aff3-925b-481b-b2f7-56a06d11b619 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.154544] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad0a0a35-fc2e-4cec-aabc-582e959792e1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.167521] env[63379]: DEBUG nova.compute.provider_tree [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1953.175627] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9df85064-7169-4a1c-bf36-bda19d399695 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "e1681d89-2f55-47b7-9962-55aa169b3d0a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.164s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1953.238464] env[63379]: DEBUG oslo_concurrency.lockutils [None req-15e1b101-dae9-499e-a541-5d4859527db8 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.756s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1953.238464] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2c00fdc9-4352-4f92-8486-4fb041b16d08 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.260s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1953.409037] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4d36cce2-8644-49be-91af-4f23a5e7c677 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Releasing lock 
"refresh_cache-d4988643-18ff-44c8-8363-e0de43da2abe" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1953.409741] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4d36cce2-8644-49be-91af-4f23a5e7c677 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "d4988643-18ff-44c8-8363-e0de43da2abe" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1953.409913] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4d36cce2-8644-49be-91af-4f23a5e7c677 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquired lock "d4988643-18ff-44c8-8363-e0de43da2abe" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1953.410795] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb76f29e-33a4-4c63-8b24-dae048cccd09 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.429363] env[63379]: DEBUG nova.virt.hardware [None req-4d36cce2-8644-49be-91af-4f23a5e7c677 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1953.429584] env[63379]: DEBUG nova.virt.hardware [None req-4d36cce2-8644-49be-91af-4f23a5e7c677 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1953.429747] env[63379]: DEBUG nova.virt.hardware [None req-4d36cce2-8644-49be-91af-4f23a5e7c677 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1953.429935] env[63379]: DEBUG nova.virt.hardware [None req-4d36cce2-8644-49be-91af-4f23a5e7c677 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1953.430100] env[63379]: DEBUG nova.virt.hardware [None req-4d36cce2-8644-49be-91af-4f23a5e7c677 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1953.430259] env[63379]: DEBUG nova.virt.hardware [None req-4d36cce2-8644-49be-91af-4f23a5e7c677 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 
{{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1953.430512] env[63379]: DEBUG nova.virt.hardware [None req-4d36cce2-8644-49be-91af-4f23a5e7c677 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1953.430685] env[63379]: DEBUG nova.virt.hardware [None req-4d36cce2-8644-49be-91af-4f23a5e7c677 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1953.430857] env[63379]: DEBUG nova.virt.hardware [None req-4d36cce2-8644-49be-91af-4f23a5e7c677 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1953.431043] env[63379]: DEBUG nova.virt.hardware [None req-4d36cce2-8644-49be-91af-4f23a5e7c677 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1953.431241] env[63379]: DEBUG nova.virt.hardware [None req-4d36cce2-8644-49be-91af-4f23a5e7c677 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1953.438075] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4d36cce2-8644-49be-91af-4f23a5e7c677 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Reconfiguring VM to attach interface {{(pid=63379) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1953.438469] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-70b380ef-0cb5-41b1-a3aa-807eb2aa81d4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.457059] env[63379]: DEBUG oslo_vmware.api [None req-4d36cce2-8644-49be-91af-4f23a5e7c677 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1953.457059] env[63379]: value = "task-1780349" [ 1953.457059] env[63379]: _type = "Task" [ 1953.457059] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1953.466766] env[63379]: DEBUG oslo_vmware.api [None req-4d36cce2-8644-49be-91af-4f23a5e7c677 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780349, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.687680] env[63379]: ERROR nova.scheduler.client.report [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [req-202ec45f-a727-48c0-99e7-e6be9f4fffd9] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID cf478c89-515f-4372-b90f-4868ab56e978. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-202ec45f-a727-48c0-99e7-e6be9f4fffd9"}]} [ 1953.703982] env[63379]: DEBUG nova.scheduler.client.report [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Refreshing inventories for resource provider cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1953.713151] env[63379]: DEBUG nova.compute.manager [req-cb5d6506-a50a-431d-8137-08e593690d37 req-bb4a27e2-6087-4434-a6ba-77fdedb41365 service nova] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Received event network-changed-44cd89ca-ba87-42ee-bfba-e868680926c7 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1953.713322] env[63379]: DEBUG nova.compute.manager [req-cb5d6506-a50a-431d-8137-08e593690d37 req-bb4a27e2-6087-4434-a6ba-77fdedb41365 service nova] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Refreshing instance network info cache due to event network-changed-44cd89ca-ba87-42ee-bfba-e868680926c7. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1953.713558] env[63379]: DEBUG oslo_concurrency.lockutils [req-cb5d6506-a50a-431d-8137-08e593690d37 req-bb4a27e2-6087-4434-a6ba-77fdedb41365 service nova] Acquiring lock "refresh_cache-d4988643-18ff-44c8-8363-e0de43da2abe" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1953.713721] env[63379]: DEBUG oslo_concurrency.lockutils [req-cb5d6506-a50a-431d-8137-08e593690d37 req-bb4a27e2-6087-4434-a6ba-77fdedb41365 service nova] Acquired lock "refresh_cache-d4988643-18ff-44c8-8363-e0de43da2abe" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1953.713903] env[63379]: DEBUG nova.network.neutron [req-cb5d6506-a50a-431d-8137-08e593690d37 req-bb4a27e2-6087-4434-a6ba-77fdedb41365 service nova] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Refreshing network info cache for port 44cd89ca-ba87-42ee-bfba-e868680926c7 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1953.718241] env[63379]: DEBUG nova.scheduler.client.report [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Updating ProviderTree inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1953.718523] env[63379]: DEBUG nova.compute.provider_tree [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1953.729981] env[63379]: DEBUG nova.scheduler.client.report [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Refreshing aggregate associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, aggregates: None {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1953.740566] env[63379]: INFO nova.compute.manager [None req-2c00fdc9-4352-4f92-8486-4fb041b16d08 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Detaching volume 486e6792-4026-4427-b9d1-876825eb94eb [ 1953.748346] env[63379]: DEBUG nova.scheduler.client.report [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 
tempest-ServersNegativeTestJSON-1202499421-project-member] Refreshing trait associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1953.775802] env[63379]: INFO nova.virt.block_device [None req-2c00fdc9-4352-4f92-8486-4fb041b16d08 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Attempting to driver detach volume 486e6792-4026-4427-b9d1-876825eb94eb from mountpoint /dev/sdb [ 1953.776217] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c00fdc9-4352-4f92-8486-4fb041b16d08 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Volume detach. Driver type: vmdk {{(pid=63379) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1953.776744] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c00fdc9-4352-4f92-8486-4fb041b16d08 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369507', 'volume_id': '486e6792-4026-4427-b9d1-876825eb94eb', 'name': 'volume-486e6792-4026-4427-b9d1-876825eb94eb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6', 'attached_at': '', 'detached_at': '', 'volume_id': '486e6792-4026-4427-b9d1-876825eb94eb', 'serial': '486e6792-4026-4427-b9d1-876825eb94eb'} {{(pid=63379) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1953.777414] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3a0d288-b4c4-46d9-afd9-6e984cb944b6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.809798] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a3304d6-4314-4a85-8f27-a09c1bf5f73a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.819279] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f318f20e-0d8b-478a-b62a-20bd1dd55a4d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.850984] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d5ad33d-cea2-4576-ba3f-e66a0f0436b0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.867413] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c00fdc9-4352-4f92-8486-4fb041b16d08 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] The volume has not been displaced from its original location: [datastore1] volume-486e6792-4026-4427-b9d1-876825eb94eb/volume-486e6792-4026-4427-b9d1-876825eb94eb.vmdk. No consolidation needed. 
{{(pid=63379) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1953.875202] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c00fdc9-4352-4f92-8486-4fb041b16d08 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Reconfiguring VM instance instance-00000067 to detach disk 2001 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1953.878314] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ca5c315f-2afa-4b41-8a57-e3c8ecb3488b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.898637] env[63379]: DEBUG oslo_vmware.api [None req-2c00fdc9-4352-4f92-8486-4fb041b16d08 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 1953.898637] env[63379]: value = "task-1780350" [ 1953.898637] env[63379]: _type = "Task" [ 1953.898637] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1953.911125] env[63379]: DEBUG oslo_vmware.api [None req-2c00fdc9-4352-4f92-8486-4fb041b16d08 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780350, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.957678] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36cf49d5-a389-4fb5-adea-71750264ec8e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.964762] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager.update_available_resource {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1953.972953] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c90d7094-a92e-4df9-9318-58ee0da093b0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.976441] env[63379]: DEBUG oslo_vmware.api [None req-4d36cce2-8644-49be-91af-4f23a5e7c677 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780349, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1954.008692] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-256f1413-a44f-463e-977d-2b68cedf799c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.016827] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb2c13f5-1b05-4c61-b089-19f66b69185a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.032412] env[63379]: DEBUG nova.compute.provider_tree [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1954.409529] env[63379]: DEBUG oslo_vmware.api [None req-2c00fdc9-4352-4f92-8486-4fb041b16d08 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780350, 'name': ReconfigVM_Task, 'duration_secs': 0.251617} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1954.409863] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c00fdc9-4352-4f92-8486-4fb041b16d08 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Reconfigured VM instance instance-00000067 to detach disk 2001 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1954.415263] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4c047dd9-cf0c-48f9-be1f-a952e07acdc9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.433677] env[63379]: DEBUG oslo_vmware.api [None req-2c00fdc9-4352-4f92-8486-4fb041b16d08 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 1954.433677] env[63379]: value = "task-1780351" [ 1954.433677] env[63379]: _type = "Task" [ 1954.433677] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1954.446042] env[63379]: DEBUG oslo_vmware.api [None req-2c00fdc9-4352-4f92-8486-4fb041b16d08 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780351, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1954.460100] env[63379]: DEBUG nova.network.neutron [req-cb5d6506-a50a-431d-8137-08e593690d37 req-bb4a27e2-6087-4434-a6ba-77fdedb41365 service nova] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Updated VIF entry in instance network info cache for port 44cd89ca-ba87-42ee-bfba-e868680926c7. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1954.460531] env[63379]: DEBUG nova.network.neutron [req-cb5d6506-a50a-431d-8137-08e593690d37 req-bb4a27e2-6087-4434-a6ba-77fdedb41365 service nova] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Updating instance_info_cache with network_info: [{"id": "41bdc6f8-c059-49a5-86a4-a7a03cfe0300", "address": "fa:16:3e:25:56:5d", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.211", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41bdc6f8-c0", "ovs_interfaceid": "41bdc6f8-c059-49a5-86a4-a7a03cfe0300", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "44cd89ca-ba87-42ee-bfba-e868680926c7", "address": "fa:16:3e:51:f5:6a", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44cd89ca-ba", "ovs_interfaceid": "44cd89ca-ba87-42ee-bfba-e868680926c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1954.468260] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63379) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1954.473127] env[63379]: DEBUG oslo_vmware.api [None req-4d36cce2-8644-49be-91af-4f23a5e7c677 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780349, 'name': ReconfigVM_Task, 'duration_secs': 0.62323} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1954.473928] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4d36cce2-8644-49be-91af-4f23a5e7c677 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Releasing lock "d4988643-18ff-44c8-8363-e0de43da2abe" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1954.477523] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4d36cce2-8644-49be-91af-4f23a5e7c677 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Reconfigured VM to attach interface {{(pid=63379) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1954.567224] env[63379]: DEBUG nova.scheduler.client.report [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Updated inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 with generation 153 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1954.567543] env[63379]: DEBUG nova.compute.provider_tree [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Updating resource provider cf478c89-515f-4372-b90f-4868ab56e978 generation from 153 to 154 during operation: update_inventory {{(pid=63379) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1954.567744] env[63379]: DEBUG nova.compute.provider_tree [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1954.946514] env[63379]: DEBUG oslo_vmware.api [None req-2c00fdc9-4352-4f92-8486-4fb041b16d08 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780351, 'name': ReconfigVM_Task, 'duration_secs': 0.153546} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1954.946814] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c00fdc9-4352-4f92-8486-4fb041b16d08 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369507', 'volume_id': '486e6792-4026-4427-b9d1-876825eb94eb', 'name': 'volume-486e6792-4026-4427-b9d1-876825eb94eb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6', 'attached_at': '', 'detached_at': '', 'volume_id': '486e6792-4026-4427-b9d1-876825eb94eb', 'serial': '486e6792-4026-4427-b9d1-876825eb94eb'} {{(pid=63379) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1954.967948] env[63379]: DEBUG oslo_concurrency.lockutils [req-cb5d6506-a50a-431d-8137-08e593690d37 req-bb4a27e2-6087-4434-a6ba-77fdedb41365 service nova] Releasing lock "refresh_cache-d4988643-18ff-44c8-8363-e0de43da2abe" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1954.968528] env[63379]: DEBUG nova.compute.manager [req-cb5d6506-a50a-431d-8137-08e593690d37 req-bb4a27e2-6087-4434-a6ba-77fdedb41365 service nova] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Received event network-changed-a482c861-81a4-437a-a78a-27d652a2e57d {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1954.968696] env[63379]: DEBUG nova.compute.manager [req-cb5d6506-a50a-431d-8137-08e593690d37 req-bb4a27e2-6087-4434-a6ba-77fdedb41365 service nova] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Refreshing instance network info cache due to event network-changed-a482c861-81a4-437a-a78a-27d652a2e57d. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1954.969400] env[63379]: DEBUG oslo_concurrency.lockutils [req-cb5d6506-a50a-431d-8137-08e593690d37 req-bb4a27e2-6087-4434-a6ba-77fdedb41365 service nova] Acquiring lock "refresh_cache-e1681d89-2f55-47b7-9962-55aa169b3d0a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1954.969400] env[63379]: DEBUG oslo_concurrency.lockutils [req-cb5d6506-a50a-431d-8137-08e593690d37 req-bb4a27e2-6087-4434-a6ba-77fdedb41365 service nova] Acquired lock "refresh_cache-e1681d89-2f55-47b7-9962-55aa169b3d0a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1954.969546] env[63379]: DEBUG nova.network.neutron [req-cb5d6506-a50a-431d-8137-08e593690d37 req-bb4a27e2-6087-4434-a6ba-77fdedb41365 service nova] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Refreshing network info cache for port a482c861-81a4-437a-a78a-27d652a2e57d {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1954.980187] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4d36cce2-8644-49be-91af-4f23a5e7c677 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "interface-d4988643-18ff-44c8-8363-e0de43da2abe-44cd89ca-ba87-42ee-bfba-e868680926c7" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.215s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1955.072859] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.094s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1955.073498] env[63379]: DEBUG nova.compute.manager [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1955.076296] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.608s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1955.076477] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1955.076660] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63379) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1955.077828] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26174ed0-1ad3-46bb-aab3-d32bbda01168 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.086228] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9d6ffa0-37d4-4511-846e-631e8971c63a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.101709] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9c9a988-6f20-48a2-856b-458a7653744d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.108883] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92baaf43-4487-465f-b692-53478abfb75b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.138730] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180013MB free_disk=163GB free_vcpus=48 pci_devices=None {{(pid=63379) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1955.138964] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1955.139225] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1955.495452] env[63379]: DEBUG nova.objects.instance [None req-2c00fdc9-4352-4f92-8486-4fb041b16d08 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lazy-loading 'flavor' on Instance uuid 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6 {{(pid=63379) 
obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1955.579527] env[63379]: DEBUG nova.compute.utils [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1955.581017] env[63379]: DEBUG nova.compute.manager [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1955.581670] env[63379]: DEBUG nova.network.neutron [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1955.629108] env[63379]: DEBUG nova.policy [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '22787c1f10df433b9f2db1de154f6778', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '28f7e38c300546a2a7a033cb12c7f89a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1955.720280] env[63379]: DEBUG nova.network.neutron [req-cb5d6506-a50a-431d-8137-08e593690d37 req-bb4a27e2-6087-4434-a6ba-77fdedb41365 service nova] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Updated VIF entry in instance network info cache for port a482c861-81a4-437a-a78a-27d652a2e57d. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1955.720682] env[63379]: DEBUG nova.network.neutron [req-cb5d6506-a50a-431d-8137-08e593690d37 req-bb4a27e2-6087-4434-a6ba-77fdedb41365 service nova] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Updating instance_info_cache with network_info: [{"id": "a482c861-81a4-437a-a78a-27d652a2e57d", "address": "fa:16:3e:3a:8d:40", "network": {"id": "867cf8d8-4bba-4306-ad6d-632c9dc6863d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-777715300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a3363a90de2d4d5988ddd03974c10d0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "459b8c74-0aa6-42b6-996a-42b1c5d7e5c6", "external-id": "nsx-vlan-transportzone-467", "segmentation_id": 467, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa482c861-81", "ovs_interfaceid": "a482c861-81a4-437a-a78a-27d652a2e57d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1955.957017] env[63379]: DEBUG nova.network.neutron [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Successfully created port: d8f16458-adcd-4f5b-861b-e694816aa3d3 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1956.085145] env[63379]: DEBUG nova.compute.manager [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1956.169511] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 510db409-0b4c-494a-8084-39ef3cd6c918 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1956.169683] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 48c17c3b-1197-46cb-a0f7-3671b2d82c7e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1956.169809] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1956.169930] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance d4988643-18ff-44c8-8363-e0de43da2abe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1956.170060] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 1c983c16-6f86-4932-9698-7fb1428ca231 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1956.170181] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance df8d513d-c201-4ffe-894e-cf8c3318cecc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1956.170294] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 10fc842d-b821-4103-b6a5-f5b2fc46ea74 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1956.170405] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance e1681d89-2f55-47b7-9962-55aa169b3d0a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1956.170526] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1956.170781] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1956.170943] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2240MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1956.223858] env[63379]: DEBUG oslo_concurrency.lockutils [req-cb5d6506-a50a-431d-8137-08e593690d37 req-bb4a27e2-6087-4434-a6ba-77fdedb41365 service nova] Releasing lock "refresh_cache-e1681d89-2f55-47b7-9962-55aa169b3d0a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1956.296777] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfcd2ba6-9d33-4973-bd6c-d957f1faf140 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.305258] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcb9842a-4697-490e-9ba9-6b699a6c2bff {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.336636] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-296067dc-4dd8-4985-a71f-72fd80730e35 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.344743] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79c3a2d6-ede3-457b-b22e-48c8da197d37 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.359169] env[63379]: DEBUG nova.compute.provider_tree [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1956.503470] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2c00fdc9-4352-4f92-8486-4fb041b16d08 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.267s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1956.547988] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4b82a464-7f9f-474a-8a93-7a76730927ec tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "interface-d4988643-18ff-44c8-8363-e0de43da2abe-44cd89ca-ba87-42ee-bfba-e868680926c7" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1956.547988] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4b82a464-7f9f-474a-8a93-7a76730927ec 
tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "interface-d4988643-18ff-44c8-8363-e0de43da2abe-44cd89ca-ba87-42ee-bfba-e868680926c7" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1956.862693] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1957.049871] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4b82a464-7f9f-474a-8a93-7a76730927ec tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "d4988643-18ff-44c8-8363-e0de43da2abe" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1957.050103] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4b82a464-7f9f-474a-8a93-7a76730927ec tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquired lock "d4988643-18ff-44c8-8363-e0de43da2abe" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1957.051029] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-842c00dd-a617-4190-b1da-45a6824fd0c7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.068629] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c76cbcdd-c305-4b76-a3c0-85e95ce9d92a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.095610] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4b82a464-7f9f-474a-8a93-7a76730927ec tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Reconfiguring VM to detach interface {{(pid=63379) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1957.095871] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f0132298-0d60-4a32-b07f-a604dac0d272 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.111693] env[63379]: DEBUG nova.compute.manager [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1957.122880] env[63379]: DEBUG oslo_vmware.api [None req-4b82a464-7f9f-474a-8a93-7a76730927ec tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1957.122880] env[63379]: value = "task-1780353" [ 1957.122880] env[63379]: _type = "Task" [ 1957.122880] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1957.136785] env[63379]: DEBUG oslo_vmware.api [None req-4b82a464-7f9f-474a-8a93-7a76730927ec tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780353, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1957.140623] env[63379]: DEBUG nova.virt.hardware [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1957.141262] env[63379]: DEBUG nova.virt.hardware [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1957.141942] env[63379]: DEBUG nova.virt.hardware [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1957.142288] env[63379]: DEBUG nova.virt.hardware [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1957.143262] env[63379]: DEBUG nova.virt.hardware [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1957.143262] env[63379]: DEBUG nova.virt.hardware [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1957.143262] env[63379]: DEBUG nova.virt.hardware [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1957.143262] env[63379]: DEBUG nova.virt.hardware [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1957.143469] env[63379]: DEBUG nova.virt.hardware [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1957.143539] env[63379]: DEBUG nova.virt.hardware [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1957.143881] env[63379]: DEBUG nova.virt.hardware [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1957.147717] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c46a1255-9e90-46c3-8df5-2f899fb85da6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.164387] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa1de0ce-485b-46b8-9575-830cb8d9799f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.323105] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cfadecc5-8c52-4842-bc80-f13f7263fa4e tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquiring lock "1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1957.323465] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cfadecc5-8c52-4842-bc80-f13f7263fa4e tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1957.323696] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cfadecc5-8c52-4842-bc80-f13f7263fa4e tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquiring lock 
"1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1957.323885] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cfadecc5-8c52-4842-bc80-f13f7263fa4e tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1957.324086] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cfadecc5-8c52-4842-bc80-f13f7263fa4e tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1957.326612] env[63379]: INFO nova.compute.manager [None req-cfadecc5-8c52-4842-bc80-f13f7263fa4e tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Terminating instance [ 1957.328571] env[63379]: DEBUG nova.compute.manager [None req-cfadecc5-8c52-4842-bc80-f13f7263fa4e tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1957.328766] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cfadecc5-8c52-4842-bc80-f13f7263fa4e tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1957.329752] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eced113-e1b1-4bf4-98bb-75ab6623303c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.337572] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfadecc5-8c52-4842-bc80-f13f7263fa4e tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1957.338046] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8047430d-e1dd-41ff-a2eb-aa35efde047d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.347745] env[63379]: DEBUG oslo_vmware.api [None req-cfadecc5-8c52-4842-bc80-f13f7263fa4e tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 1957.347745] env[63379]: value = "task-1780354" [ 1957.347745] env[63379]: _type = "Task" [ 1957.347745] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1957.361050] env[63379]: DEBUG oslo_vmware.api [None req-cfadecc5-8c52-4842-bc80-f13f7263fa4e tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780354, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1957.368091] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63379) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1957.368364] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.229s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1957.406572] env[63379]: DEBUG nova.compute.manager [req-dbb7493e-a7db-4fae-9e4c-9cbf622470b2 req-6a130d0d-a3ba-4ae0-864f-9277f74c8721 service nova] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Received event network-vif-plugged-d8f16458-adcd-4f5b-861b-e694816aa3d3 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1957.406808] env[63379]: DEBUG oslo_concurrency.lockutils [req-dbb7493e-a7db-4fae-9e4c-9cbf622470b2 req-6a130d0d-a3ba-4ae0-864f-9277f74c8721 service nova] Acquiring lock "2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1957.407042] env[63379]: DEBUG oslo_concurrency.lockutils [req-dbb7493e-a7db-4fae-9e4c-9cbf622470b2 req-6a130d0d-a3ba-4ae0-864f-9277f74c8721 service nova] Lock "2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1957.407241] env[63379]: DEBUG oslo_concurrency.lockutils [req-dbb7493e-a7db-4fae-9e4c-9cbf622470b2 req-6a130d0d-a3ba-4ae0-864f-9277f74c8721 service nova] Lock "2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1957.407401] env[63379]: DEBUG nova.compute.manager [req-dbb7493e-a7db-4fae-9e4c-9cbf622470b2 req-6a130d0d-a3ba-4ae0-864f-9277f74c8721 service nova] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] No waiting events found dispatching network-vif-plugged-d8f16458-adcd-4f5b-861b-e694816aa3d3 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1957.407620] env[63379]: WARNING nova.compute.manager [req-dbb7493e-a7db-4fae-9e4c-9cbf622470b2 req-6a130d0d-a3ba-4ae0-864f-9277f74c8721 service nova] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Received unexpected event network-vif-plugged-d8f16458-adcd-4f5b-861b-e694816aa3d3 for instance with vm_state building and task_state spawning. 
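The paired 'Lock "compute_resources" acquired by ... :: waited 0.000s' / '"released" ... :: held 2.229s' entries above come from oslo.concurrency's lockutils wrapper, which serializes callers on a named in-process lock and reports how long each caller waited for and held it. A minimal sketch of that usage, assuming only stock oslo.concurrency; the lock name, shared state, and function below are illustrative placeholders, not Nova source:

    # Illustrative named-lock decorator from oslo.concurrency (placeholder
    # lock name and function). The wrapper emits the "acquired ... waited Ns"
    # and "released ... held Ns" DEBUG lines of the kind seen in this log.
    from oslo_concurrency import lockutils

    _tracker = {'used_vcpus': 0}


    @lockutils.synchronized('compute_resources')
    def claim_vcpus(count):
        # Callers serialize on the in-process lock named 'compute_resources';
        # concurrent green threads block here until the holder returns.
        _tracker['used_vcpus'] += count
        return _tracker['used_vcpus']
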
[ 1957.501761] env[63379]: DEBUG nova.network.neutron [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Successfully updated port: d8f16458-adcd-4f5b-861b-e694816aa3d3 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1957.633416] env[63379]: DEBUG oslo_vmware.api [None req-4b82a464-7f9f-474a-8a93-7a76730927ec tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780353, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1957.857776] env[63379]: DEBUG oslo_vmware.api [None req-cfadecc5-8c52-4842-bc80-f13f7263fa4e tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780354, 'name': PowerOffVM_Task, 'duration_secs': 0.321443} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1957.858086] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfadecc5-8c52-4842-bc80-f13f7263fa4e tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1957.858273] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cfadecc5-8c52-4842-bc80-f13f7263fa4e tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1957.858520] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3614e813-e38c-418e-8eb6-02c0a6b137c0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.948236] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cfadecc5-8c52-4842-bc80-f13f7263fa4e tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1957.948459] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cfadecc5-8c52-4842-bc80-f13f7263fa4e tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1957.948584] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfadecc5-8c52-4842-bc80-f13f7263fa4e tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Deleting the datastore file [datastore1] 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1957.948874] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-673fa5e2-9f2e-4afb-864c-9ef4f9c008ab {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.955543] 
env[63379]: DEBUG oslo_vmware.api [None req-cfadecc5-8c52-4842-bc80-f13f7263fa4e tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 1957.955543] env[63379]: value = "task-1780357" [ 1957.955543] env[63379]: _type = "Task" [ 1957.955543] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1957.964191] env[63379]: DEBUG oslo_vmware.api [None req-cfadecc5-8c52-4842-bc80-f13f7263fa4e tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780357, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1958.005157] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Acquiring lock "refresh_cache-2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1958.005321] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Acquired lock "refresh_cache-2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1958.005523] env[63379]: DEBUG nova.network.neutron [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1958.134198] env[63379]: DEBUG oslo_vmware.api [None req-4b82a464-7f9f-474a-8a93-7a76730927ec tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780353, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1958.368174] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1958.368429] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1958.368591] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1958.368744] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1958.368890] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63379) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10593}} [ 1958.465120] env[63379]: DEBUG oslo_vmware.api [None req-cfadecc5-8c52-4842-bc80-f13f7263fa4e tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780357, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.199113} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1958.465397] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfadecc5-8c52-4842-bc80-f13f7263fa4e tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1958.465582] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cfadecc5-8c52-4842-bc80-f13f7263fa4e tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1958.465808] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cfadecc5-8c52-4842-bc80-f13f7263fa4e tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1958.466036] env[63379]: INFO nova.compute.manager [None req-cfadecc5-8c52-4842-bc80-f13f7263fa4e tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Took 1.14 seconds to destroy the instance on the hypervisor. 
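Immediately below, oslo.service.loopingcall reports 'Waiting for function ... _deallocate_network_with_retries to return': the network deallocation call is wrapped in a retry helper that re-invokes it on transient failures before giving up. A minimal sketch of that pattern using oslo.service's RetryDecorator; the exception class, function name, and retry parameters here are illustrative placeholders, not taken from Nova:

    # Illustrative use of oslo.service's RetryDecorator (placeholder names and
    # retry parameters). The decorator re-invokes the wrapped callable, sleeping
    # between attempts, whenever it raises one of the listed exceptions, and
    # logs "Waiting for function <name> to return." while the loop runs.
    from oslo_service import loopingcall


    class TransientNetworkError(Exception):
        """Stand-in for a retryable failure type."""


    @loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=2,
                                max_sleep_time=12,
                                exceptions=(TransientNetworkError,))
    def deallocate_network(instance_uuid):
        # A real implementation would call the network API here; raising
        # TransientNetworkError triggers another attempt until the retry
        # budget is exhausted, after which the exception propagates.
        print('deallocating network for %s' % instance_uuid)
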
[ 1958.466286] env[63379]: DEBUG oslo.service.loopingcall [None req-cfadecc5-8c52-4842-bc80-f13f7263fa4e tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1958.466486] env[63379]: DEBUG nova.compute.manager [-] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1958.466578] env[63379]: DEBUG nova.network.neutron [-] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1958.535685] env[63379]: DEBUG nova.network.neutron [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1958.635560] env[63379]: DEBUG oslo_vmware.api [None req-4b82a464-7f9f-474a-8a93-7a76730927ec tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780353, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1958.660691] env[63379]: DEBUG nova.network.neutron [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Updating instance_info_cache with network_info: [{"id": "d8f16458-adcd-4f5b-861b-e694816aa3d3", "address": "fa:16:3e:e7:75:2e", "network": {"id": "678e0600-c1d0-4fb0-8219-a81a1ca0f4f0", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1244921057-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28f7e38c300546a2a7a033cb12c7f89a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1895250-76cc-41f7-b7f8-2e5679494607", "external-id": "nsx-vlan-transportzone-785", "segmentation_id": 785, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd8f16458-ad", "ovs_interfaceid": "d8f16458-adcd-4f5b-861b-e694816aa3d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1959.136307] env[63379]: DEBUG oslo_vmware.api [None req-4b82a464-7f9f-474a-8a93-7a76730927ec tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780353, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1959.163623] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Releasing lock "refresh_cache-2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1959.164024] env[63379]: DEBUG nova.compute.manager [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Instance network_info: |[{"id": "d8f16458-adcd-4f5b-861b-e694816aa3d3", "address": "fa:16:3e:e7:75:2e", "network": {"id": "678e0600-c1d0-4fb0-8219-a81a1ca0f4f0", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1244921057-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28f7e38c300546a2a7a033cb12c7f89a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1895250-76cc-41f7-b7f8-2e5679494607", "external-id": "nsx-vlan-transportzone-785", "segmentation_id": 785, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd8f16458-ad", "ovs_interfaceid": "d8f16458-adcd-4f5b-861b-e694816aa3d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1959.164453] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e7:75:2e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a1895250-76cc-41f7-b7f8-2e5679494607', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd8f16458-adcd-4f5b-861b-e694816aa3d3', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1959.176420] env[63379]: DEBUG oslo.service.loopingcall [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1959.176678] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1959.176982] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-72be548d-bd3f-4e13-a48f-b6b19a431bd6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.199887] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1959.199887] env[63379]: value = "task-1780358" [ 1959.199887] env[63379]: _type = "Task" [ 1959.199887] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1959.207548] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780358, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1959.349272] env[63379]: DEBUG nova.network.neutron [-] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1959.438283] env[63379]: DEBUG nova.compute.manager [req-9d601438-d19b-462a-befa-207c1367d0b7 req-1ca5be1c-74d5-47c4-8648-8e3d188d82a1 service nova] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Received event network-changed-d8f16458-adcd-4f5b-861b-e694816aa3d3 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1959.438383] env[63379]: DEBUG nova.compute.manager [req-9d601438-d19b-462a-befa-207c1367d0b7 req-1ca5be1c-74d5-47c4-8648-8e3d188d82a1 service nova] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Refreshing instance network info cache due to event network-changed-d8f16458-adcd-4f5b-861b-e694816aa3d3. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1959.438582] env[63379]: DEBUG oslo_concurrency.lockutils [req-9d601438-d19b-462a-befa-207c1367d0b7 req-1ca5be1c-74d5-47c4-8648-8e3d188d82a1 service nova] Acquiring lock "refresh_cache-2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1959.438743] env[63379]: DEBUG oslo_concurrency.lockutils [req-9d601438-d19b-462a-befa-207c1367d0b7 req-1ca5be1c-74d5-47c4-8648-8e3d188d82a1 service nova] Acquired lock "refresh_cache-2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1959.438934] env[63379]: DEBUG nova.network.neutron [req-9d601438-d19b-462a-befa-207c1367d0b7 req-1ca5be1c-74d5-47c4-8648-8e3d188d82a1 service nova] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Refreshing network info cache for port d8f16458-adcd-4f5b-861b-e694816aa3d3 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1959.635787] env[63379]: DEBUG oslo_vmware.api [None req-4b82a464-7f9f-474a-8a93-7a76730927ec tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780353, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1959.709982] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780358, 'name': CreateVM_Task, 'duration_secs': 0.41697} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1959.710186] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1959.710871] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1959.711093] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1959.711421] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1959.711676] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39f2d246-e01b-45cb-9188-9a9d3e5127fa {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.716256] env[63379]: DEBUG oslo_vmware.api [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Waiting for the task: (returnval){ [ 1959.716256] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5268dd0e-672f-799a-b494-f3c8532aaca8" [ 1959.716256] env[63379]: _type = "Task" [ 1959.716256] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1959.723622] env[63379]: DEBUG oslo_vmware.api [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5268dd0e-672f-799a-b494-f3c8532aaca8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1959.851536] env[63379]: INFO nova.compute.manager [-] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Took 1.38 seconds to deallocate network for instance. [ 1960.135262] env[63379]: DEBUG oslo_vmware.api [None req-4b82a464-7f9f-474a-8a93-7a76730927ec tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780353, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1960.184893] env[63379]: DEBUG nova.network.neutron [req-9d601438-d19b-462a-befa-207c1367d0b7 req-1ca5be1c-74d5-47c4-8648-8e3d188d82a1 service nova] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Updated VIF entry in instance network info cache for port d8f16458-adcd-4f5b-861b-e694816aa3d3. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1960.185245] env[63379]: DEBUG nova.network.neutron [req-9d601438-d19b-462a-befa-207c1367d0b7 req-1ca5be1c-74d5-47c4-8648-8e3d188d82a1 service nova] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Updating instance_info_cache with network_info: [{"id": "d8f16458-adcd-4f5b-861b-e694816aa3d3", "address": "fa:16:3e:e7:75:2e", "network": {"id": "678e0600-c1d0-4fb0-8219-a81a1ca0f4f0", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1244921057-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28f7e38c300546a2a7a033cb12c7f89a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1895250-76cc-41f7-b7f8-2e5679494607", "external-id": "nsx-vlan-transportzone-785", "segmentation_id": 785, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd8f16458-ad", "ovs_interfaceid": "d8f16458-adcd-4f5b-861b-e694816aa3d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1960.226922] env[63379]: DEBUG oslo_vmware.api [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5268dd0e-672f-799a-b494-f3c8532aaca8, 'name': SearchDatastore_Task, 'duration_secs': 0.356725} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1960.227251] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1960.227612] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1960.227711] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1960.227860] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1960.228087] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1960.228351] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2d4782f4-d113-494b-91d6-973a9bf0089b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.236320] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1960.236499] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1960.237167] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40ee6752-e25f-43f2-8b07-fbc1e15a6e85 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.242174] env[63379]: DEBUG oslo_vmware.api [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Waiting for the task: (returnval){ [ 1960.242174] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]527a39eb-d62b-de89-f72a-95d5f0652513" [ 1960.242174] env[63379]: _type = "Task" [ 1960.242174] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1960.249533] env[63379]: DEBUG oslo_vmware.api [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]527a39eb-d62b-de89-f72a-95d5f0652513, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1960.357733] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cfadecc5-8c52-4842-bc80-f13f7263fa4e tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1960.358158] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cfadecc5-8c52-4842-bc80-f13f7263fa4e tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1960.358451] env[63379]: DEBUG nova.objects.instance [None req-cfadecc5-8c52-4842-bc80-f13f7263fa4e tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lazy-loading 'resources' on Instance uuid 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1960.636096] env[63379]: DEBUG oslo_vmware.api [None req-4b82a464-7f9f-474a-8a93-7a76730927ec tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780353, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1960.687921] env[63379]: DEBUG oslo_concurrency.lockutils [req-9d601438-d19b-462a-befa-207c1367d0b7 req-1ca5be1c-74d5-47c4-8648-8e3d188d82a1 service nova] Releasing lock "refresh_cache-2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1960.688229] env[63379]: DEBUG nova.compute.manager [req-9d601438-d19b-462a-befa-207c1367d0b7 req-1ca5be1c-74d5-47c4-8648-8e3d188d82a1 service nova] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Received event network-vif-deleted-3a859294-da1a-435c-aa5c-a1ec72c124c2 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1960.751791] env[63379]: DEBUG oslo_vmware.api [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]527a39eb-d62b-de89-f72a-95d5f0652513, 'name': SearchDatastore_Task, 'duration_secs': 0.008722} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1960.752500] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f73d5092-8f10-4016-bbf2-5f1f6bd50718 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.756969] env[63379]: DEBUG oslo_vmware.api [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Waiting for the task: (returnval){ [ 1960.756969] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52cf290b-9c82-fa40-390f-5844573726f3" [ 1960.756969] env[63379]: _type = "Task" [ 1960.756969] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1960.764115] env[63379]: DEBUG oslo_vmware.api [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52cf290b-9c82-fa40-390f-5844573726f3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1960.964705] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1960.964918] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Starting heal instance info cache {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9974}} [ 1960.978574] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79e8c371-ede5-405c-a681-8c035b5c4f01 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.988734] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f72f4d3d-883f-48c2-a3b9-8173335f32f5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.019040] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57e6be9c-9c75-4b49-9e3b-00d4d72e5a63 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.026864] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04522f37-7e6b-48b2-bd67-639a1fb24ea5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.040780] env[63379]: DEBUG nova.compute.provider_tree [None req-cfadecc5-8c52-4842-bc80-f13f7263fa4e tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1961.140473] env[63379]: DEBUG oslo_vmware.api [None req-4b82a464-7f9f-474a-8a93-7a76730927ec tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780353, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1961.267316] env[63379]: DEBUG oslo_vmware.api [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52cf290b-9c82-fa40-390f-5844573726f3, 'name': SearchDatastore_Task, 'duration_secs': 0.008903} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1961.267707] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1961.268032] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576/2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1961.268446] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-764af80b-ed03-4086-a616-e84cb7cddcb8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.275312] env[63379]: DEBUG oslo_vmware.api [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Waiting for the task: (returnval){ [ 1961.275312] env[63379]: value = "task-1780360" [ 1961.275312] env[63379]: _type = "Task" [ 1961.275312] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1961.284014] env[63379]: DEBUG oslo_vmware.api [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780360, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1961.471666] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Didn't find any instances for network info cache update. {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10060}} [ 1961.543610] env[63379]: DEBUG nova.scheduler.client.report [None req-cfadecc5-8c52-4842-bc80-f13f7263fa4e tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1961.638682] env[63379]: DEBUG oslo_vmware.api [None req-4b82a464-7f9f-474a-8a93-7a76730927ec tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780353, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1961.787805] env[63379]: DEBUG oslo_vmware.api [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780360, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1962.048675] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cfadecc5-8c52-4842-bc80-f13f7263fa4e tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.690s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1962.067041] env[63379]: INFO nova.scheduler.client.report [None req-cfadecc5-8c52-4842-bc80-f13f7263fa4e tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Deleted allocations for instance 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6 [ 1962.138058] env[63379]: DEBUG oslo_vmware.api [None req-4b82a464-7f9f-474a-8a93-7a76730927ec tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780353, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1962.286098] env[63379]: DEBUG oslo_vmware.api [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780360, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.742177} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1962.286393] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576/2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1962.286605] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1962.286855] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c42718a2-c568-4b0d-bf33-bec2968ffd7f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.292886] env[63379]: DEBUG oslo_vmware.api [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Waiting for the task: (returnval){ [ 1962.292886] env[63379]: value = "task-1780362" [ 1962.292886] env[63379]: _type = "Task" [ 1962.292886] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1962.302182] env[63379]: DEBUG oslo_vmware.api [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780362, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1962.302616] env[63379]: DEBUG nova.compute.manager [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Stashing vm_state: active {{(pid=63379) _prep_resize /opt/stack/nova/nova/compute/manager.py:5671}} [ 1962.575596] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cfadecc5-8c52-4842-bc80-f13f7263fa4e tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.252s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1962.638693] env[63379]: DEBUG oslo_vmware.api [None req-4b82a464-7f9f-474a-8a93-7a76730927ec tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780353, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1962.802879] env[63379]: DEBUG oslo_vmware.api [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780362, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.0655} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1962.803081] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1962.803841] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57a3f8ba-edce-4dfd-9019-2143bd37a29f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.828478] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576/2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1962.829436] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1962.829665] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1962.831281] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b219ebf7-1638-4643-bd1a-93d3cf704a36 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.847492] env[63379]: INFO nova.compute.claims [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1962.856277] env[63379]: DEBUG oslo_vmware.api [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Waiting for the task: (returnval){ [ 1962.856277] env[63379]: value = "task-1780363" [ 1962.856277] env[63379]: _type = "Task" [ 1962.856277] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1962.866270] env[63379]: DEBUG oslo_vmware.api [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780363, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1962.963607] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1962.963867] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1963.138811] env[63379]: DEBUG oslo_vmware.api [None req-4b82a464-7f9f-474a-8a93-7a76730927ec tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780353, 'name': ReconfigVM_Task, 'duration_secs': 5.945963} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1963.139080] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4b82a464-7f9f-474a-8a93-7a76730927ec tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Releasing lock "d4988643-18ff-44c8-8363-e0de43da2abe" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1963.139303] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4b82a464-7f9f-474a-8a93-7a76730927ec tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Reconfigured VM to detach interface {{(pid=63379) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1963.353072] env[63379]: INFO nova.compute.resource_tracker [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Updating resource usage from migration 586ced43-f4f3-4a5a-8699-2d9363caa025 [ 1963.365566] env[63379]: DEBUG oslo_vmware.api [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780363, 'name': ReconfigVM_Task, 'duration_secs': 0.290085} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1963.366140] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Reconfigured VM instance instance-0000006f to attach disk [datastore1] 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576/2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1963.367189] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4af19c83-b397-44f6-af13-cc6432002072 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.376956] env[63379]: DEBUG oslo_vmware.api [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Waiting for the task: (returnval){ [ 1963.376956] env[63379]: value = "task-1780364" [ 1963.376956] env[63379]: _type = "Task" [ 1963.376956] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1963.384498] env[63379]: DEBUG oslo_vmware.api [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780364, 'name': Rename_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1963.492050] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5f0aa1d-6488-4937-838f-34c8994a44e7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.497091] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95d9effc-9a71-4aa0-a0b1-b221bbd8ba8d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.528854] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3687498-e804-43b8-8173-22c492a7b6bb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.536206] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61f25295-00a8-4258-9e42-1ff654f84308 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.550843] env[63379]: DEBUG nova.compute.provider_tree [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1963.891164] env[63379]: DEBUG oslo_vmware.api [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780364, 'name': Rename_Task, 'duration_secs': 0.14724} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1963.892194] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1963.892194] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-caab6483-47a8-499b-aab4-304773e4b52d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.900441] env[63379]: DEBUG oslo_vmware.api [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Waiting for the task: (returnval){ [ 1963.900441] env[63379]: value = "task-1780366" [ 1963.900441] env[63379]: _type = "Task" [ 1963.900441] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1963.910755] env[63379]: DEBUG oslo_vmware.api [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780366, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1964.053874] env[63379]: DEBUG nova.scheduler.client.report [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1964.412789] env[63379]: DEBUG oslo_vmware.api [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780366, 'name': PowerOnVM_Task, 'duration_secs': 0.50135} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1964.413175] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1964.413514] env[63379]: INFO nova.compute.manager [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Took 7.30 seconds to spawn the instance on the hypervisor. 
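The records above trace the complete spawn path for instance 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576: each driver step issues a SOAP call (the "Invoking ... with opID=oslo.vmware-..." lines), receives a task reference, and then polls it (the "Task: {...} progress is N%" lines from _poll_task) until it completes. The sketch below illustrates only that generic invoke-and-wait pattern through the public oslo.vmware session API; it is not Nova's actual vm_util/vmops code path, and the vCenter host, credentials, and the 'vm-12345' moref value are placeholders, with the constructor arguments assumed to be positional (host, username, password, api_retry_count, task_poll_interval).

# Minimal sketch (assumptions noted above) of the oslo.vmware
# invoke-task-then-poll pattern visible in the preceding records.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

# Creating the session logs into vCenter, as in the SessionManager.Login
# records emitted when the driver starts up.
session = vmware_api.VMwareAPISession(
    'vc1.example.test',   # vCenter host (placeholder)
    'administrator',      # username (placeholder)
    'secret',             # password (placeholder)
    10,                   # api_retry_count: retries for transient faults
    0.5)                  # task_poll_interval: seconds between progress polls

# Build a managed-object reference for an existing VM; the moref value
# 'vm-12345' is a placeholder for illustration only.
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

# invoke_api() issues the SOAP request (the "Invoking
# VirtualMachine.PowerOnVM_Task" style lines) and returns a task
# reference without blocking.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

# wait_for_task() polls the task every task_poll_interval seconds (the
# "Task: {...} progress is N%" lines) and returns once the task reaches
# the success state, raising an exception if the task errors out.
session.wait_for_task(task)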
[ 1964.413813] env[63379]: DEBUG nova.compute.manager [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1964.414994] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-313021f0-5ea6-4301-959c-59718499b400 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.560083] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 1.730s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1964.560335] env[63379]: INFO nova.compute.manager [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Migrating [ 1964.688614] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4b82a464-7f9f-474a-8a93-7a76730927ec tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "refresh_cache-d4988643-18ff-44c8-8363-e0de43da2abe" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1964.688826] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4b82a464-7f9f-474a-8a93-7a76730927ec tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquired lock "refresh_cache-d4988643-18ff-44c8-8363-e0de43da2abe" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1964.689038] env[63379]: DEBUG nova.network.neutron [None req-4b82a464-7f9f-474a-8a93-7a76730927ec tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1964.698035] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquiring lock "a39c5511-3efc-41e9-8902-692f237557e1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1964.698220] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "a39c5511-3efc-41e9-8902-692f237557e1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1964.933107] env[63379]: INFO nova.compute.manager [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 
tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Took 12.97 seconds to build instance. [ 1965.078216] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "refresh_cache-1c983c16-6f86-4932-9698-7fb1428ca231" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1965.078463] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquired lock "refresh_cache-1c983c16-6f86-4932-9698-7fb1428ca231" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1965.078656] env[63379]: DEBUG nova.network.neutron [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1965.162673] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f1944bcc-083b-409f-8181-9d88113d1013 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "d4988643-18ff-44c8-8363-e0de43da2abe" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1965.162673] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f1944bcc-083b-409f-8181-9d88113d1013 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "d4988643-18ff-44c8-8363-e0de43da2abe" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1965.162673] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f1944bcc-083b-409f-8181-9d88113d1013 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "d4988643-18ff-44c8-8363-e0de43da2abe-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1965.162673] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f1944bcc-083b-409f-8181-9d88113d1013 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "d4988643-18ff-44c8-8363-e0de43da2abe-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1965.162673] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f1944bcc-083b-409f-8181-9d88113d1013 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "d4988643-18ff-44c8-8363-e0de43da2abe-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1965.163033] env[63379]: INFO nova.compute.manager [None req-f1944bcc-083b-409f-8181-9d88113d1013 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Terminating instance [ 1965.164890] env[63379]: DEBUG nova.compute.manager [None req-f1944bcc-083b-409f-8181-9d88113d1013 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1965.165100] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f1944bcc-083b-409f-8181-9d88113d1013 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1965.166013] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c0e0ea9-15ac-485e-bae4-da2e4be54069 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.173496] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1944bcc-083b-409f-8181-9d88113d1013 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1965.174179] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d8a3cc3f-1413-4bdc-93e5-3852598e0700 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.179751] env[63379]: DEBUG oslo_vmware.api [None req-f1944bcc-083b-409f-8181-9d88113d1013 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1965.179751] env[63379]: value = "task-1780367" [ 1965.179751] env[63379]: _type = "Task" [ 1965.179751] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1965.188281] env[63379]: DEBUG oslo_vmware.api [None req-f1944bcc-083b-409f-8181-9d88113d1013 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780367, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1965.200095] env[63379]: DEBUG nova.compute.manager [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Starting instance... 
{{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1965.416511] env[63379]: INFO nova.network.neutron [None req-4b82a464-7f9f-474a-8a93-7a76730927ec tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Port 44cd89ca-ba87-42ee-bfba-e868680926c7 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1965.416905] env[63379]: DEBUG nova.network.neutron [None req-4b82a464-7f9f-474a-8a93-7a76730927ec tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Updating instance_info_cache with network_info: [{"id": "41bdc6f8-c059-49a5-86a4-a7a03cfe0300", "address": "fa:16:3e:25:56:5d", "network": {"id": "501025fb-aee7-4f74-80fd-af4976529317", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2074442737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.211", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edb0d4b37a67492f9e0275b341e80cc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41bdc6f8-c0", "ovs_interfaceid": "41bdc6f8-c059-49a5-86a4-a7a03cfe0300", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1965.418385] env[63379]: DEBUG oslo_concurrency.lockutils [None req-24731c3c-cd11-40d6-b96c-e47c5f4a6aa4 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Acquiring lock "2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1965.435330] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d9fc5827-17c8-438a-95f5-a7c0007ddf9e tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Lock "2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.483s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1965.435561] env[63379]: DEBUG oslo_concurrency.lockutils [None req-24731c3c-cd11-40d6-b96c-e47c5f4a6aa4 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Lock "2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.017s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1965.435930] env[63379]: DEBUG oslo_concurrency.lockutils [None req-24731c3c-cd11-40d6-b96c-e47c5f4a6aa4 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Acquiring lock "2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1965.436185] env[63379]: DEBUG oslo_concurrency.lockutils [None req-24731c3c-cd11-40d6-b96c-e47c5f4a6aa4 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Lock "2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1965.436432] env[63379]: DEBUG oslo_concurrency.lockutils [None req-24731c3c-cd11-40d6-b96c-e47c5f4a6aa4 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Lock "2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1965.438542] env[63379]: INFO nova.compute.manager [None req-24731c3c-cd11-40d6-b96c-e47c5f4a6aa4 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Terminating instance [ 1965.440286] env[63379]: DEBUG nova.compute.manager [None req-24731c3c-cd11-40d6-b96c-e47c5f4a6aa4 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1965.440485] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-24731c3c-cd11-40d6-b96c-e47c5f4a6aa4 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1965.441349] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-533ce347-e3e4-4f09-8b1a-1d39ac192684 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.449440] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-24731c3c-cd11-40d6-b96c-e47c5f4a6aa4 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1965.449440] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c4d7df2c-1353-4a2b-8eed-fce8165b7808 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.454691] env[63379]: DEBUG oslo_vmware.api [None req-24731c3c-cd11-40d6-b96c-e47c5f4a6aa4 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Waiting for the task: (returnval){ [ 1965.454691] env[63379]: value = "task-1780368" [ 1965.454691] env[63379]: _type = "Task" [ 1965.454691] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1965.462554] env[63379]: DEBUG oslo_vmware.api [None req-24731c3c-cd11-40d6-b96c-e47c5f4a6aa4 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780368, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1965.695722] env[63379]: DEBUG oslo_vmware.api [None req-f1944bcc-083b-409f-8181-9d88113d1013 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780367, 'name': PowerOffVM_Task, 'duration_secs': 0.240566} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1965.696112] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1944bcc-083b-409f-8181-9d88113d1013 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1965.696325] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f1944bcc-083b-409f-8181-9d88113d1013 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1965.696628] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-46ab219a-a1c4-43bc-b1ee-a7af85855faa {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.724423] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1965.724743] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1965.726767] env[63379]: INFO nova.compute.claims [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1965.829479] env[63379]: DEBUG nova.network.neutron [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Updating instance_info_cache with network_info: [{"id": "62d6fce2-bf52-422e-8166-344c4fd61274", "address": "fa:16:3e:fe:3b:ec", "network": {"id": "c67e6fb1-ba3e-4494-b459-ecd555f3bf64", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1864563188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.212", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c01c5c8c3734c4ea066324e542e7374", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6934071-bf85-4591-9c7d-55c7ea131262", "external-id": "nsx-vlan-transportzone-452", "segmentation_id": 452, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tap62d6fce2-bf", "ovs_interfaceid": "62d6fce2-bf52-422e-8166-344c4fd61274", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1965.888332] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f1944bcc-083b-409f-8181-9d88113d1013 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1965.888546] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f1944bcc-083b-409f-8181-9d88113d1013 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1965.888732] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1944bcc-083b-409f-8181-9d88113d1013 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Deleting the datastore file [datastore1] d4988643-18ff-44c8-8363-e0de43da2abe {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1965.889030] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e4ff6511-f091-4ef2-8667-dda2be762a7c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.895831] env[63379]: DEBUG oslo_vmware.api [None req-f1944bcc-083b-409f-8181-9d88113d1013 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1965.895831] env[63379]: value = "task-1780371" [ 1965.895831] env[63379]: _type = "Task" [ 1965.895831] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1965.903112] env[63379]: DEBUG oslo_vmware.api [None req-f1944bcc-083b-409f-8181-9d88113d1013 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780371, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1965.920063] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4b82a464-7f9f-474a-8a93-7a76730927ec tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Releasing lock "refresh_cache-d4988643-18ff-44c8-8363-e0de43da2abe" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1965.964226] env[63379]: DEBUG oslo_vmware.api [None req-24731c3c-cd11-40d6-b96c-e47c5f4a6aa4 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780368, 'name': PowerOffVM_Task, 'duration_secs': 0.271494} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1965.964725] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-24731c3c-cd11-40d6-b96c-e47c5f4a6aa4 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1965.964725] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-24731c3c-cd11-40d6-b96c-e47c5f4a6aa4 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1965.964888] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6e4e7639-b4a7-4b34-9a08-2283ba91d8f9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.064727] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-24731c3c-cd11-40d6-b96c-e47c5f4a6aa4 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1966.064971] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-24731c3c-cd11-40d6-b96c-e47c5f4a6aa4 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1966.065189] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-24731c3c-cd11-40d6-b96c-e47c5f4a6aa4 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Deleting the datastore file [datastore1] 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1966.065463] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cd55a136-ef4e-46dc-b597-7baf7c16a4c8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.071435] env[63379]: DEBUG oslo_vmware.api [None req-24731c3c-cd11-40d6-b96c-e47c5f4a6aa4 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Waiting for the task: (returnval){ [ 1966.071435] env[63379]: value = "task-1780373" [ 1966.071435] env[63379]: _type = "Task" [ 1966.071435] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1966.079161] env[63379]: DEBUG oslo_vmware.api [None req-24731c3c-cd11-40d6-b96c-e47c5f4a6aa4 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780373, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1966.332162] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Releasing lock "refresh_cache-1c983c16-6f86-4932-9698-7fb1428ca231" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1966.406094] env[63379]: DEBUG oslo_vmware.api [None req-f1944bcc-083b-409f-8181-9d88113d1013 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780371, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.174328} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1966.406363] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1944bcc-083b-409f-8181-9d88113d1013 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1966.406553] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f1944bcc-083b-409f-8181-9d88113d1013 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1966.406736] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f1944bcc-083b-409f-8181-9d88113d1013 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1966.406920] env[63379]: INFO nova.compute.manager [None req-f1944bcc-083b-409f-8181-9d88113d1013 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Took 1.24 seconds to destroy the instance on the hypervisor. [ 1966.407179] env[63379]: DEBUG oslo.service.loopingcall [None req-f1944bcc-083b-409f-8181-9d88113d1013 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1966.407383] env[63379]: DEBUG nova.compute.manager [-] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1966.407467] env[63379]: DEBUG nova.network.neutron [-] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1966.423452] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4b82a464-7f9f-474a-8a93-7a76730927ec tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "interface-d4988643-18ff-44c8-8363-e0de43da2abe-44cd89ca-ba87-42ee-bfba-e868680926c7" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.876s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1966.581016] env[63379]: DEBUG oslo_vmware.api [None req-24731c3c-cd11-40d6-b96c-e47c5f4a6aa4 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780373, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143665} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1966.581351] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-24731c3c-cd11-40d6-b96c-e47c5f4a6aa4 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1966.581642] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-24731c3c-cd11-40d6-b96c-e47c5f4a6aa4 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1966.581741] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-24731c3c-cd11-40d6-b96c-e47c5f4a6aa4 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1966.581901] env[63379]: INFO nova.compute.manager [None req-24731c3c-cd11-40d6-b96c-e47c5f4a6aa4 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1966.582155] env[63379]: DEBUG oslo.service.loopingcall [None req-24731c3c-cd11-40d6-b96c-e47c5f4a6aa4 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1966.582352] env[63379]: DEBUG nova.compute.manager [-] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1966.582443] env[63379]: DEBUG nova.network.neutron [-] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1966.863739] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09c6fef5-af49-41cd-91f9-a21de03679b4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.872390] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeee33c7-2bb2-4f0e-96f4-5bc20c395d52 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.905235] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40152069-0d94-420e-8b9d-3d0db11f3d0f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.914304] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e0cc4cf-eae8-4f73-a3df-374adbbd4fc5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.931895] env[63379]: DEBUG nova.compute.provider_tree [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1966.959135] env[63379]: DEBUG nova.compute.manager [req-2b343a92-2322-49c4-a8e6-e6da313dc242 req-d915a12e-8c8b-4d4a-9d3b-ff8f174ed234 service nova] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Received event network-vif-deleted-d8f16458-adcd-4f5b-861b-e694816aa3d3 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1966.959135] env[63379]: INFO nova.compute.manager [req-2b343a92-2322-49c4-a8e6-e6da313dc242 req-d915a12e-8c8b-4d4a-9d3b-ff8f174ed234 service nova] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Neutron deleted interface d8f16458-adcd-4f5b-861b-e694816aa3d3; detaching it from the instance and deleting it from the info cache [ 1966.959135] env[63379]: DEBUG nova.network.neutron [req-2b343a92-2322-49c4-a8e6-e6da313dc242 req-d915a12e-8c8b-4d4a-9d3b-ff8f174ed234 service nova] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1966.964611] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1967.068769] env[63379]: DEBUG nova.compute.manager [req-94f80809-ebd2-42ea-9d2e-945a8f8a9818 req-8649a80d-f06e-4ea9-84c9-93d990bb93bc service nova] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] 
Received event network-vif-deleted-41bdc6f8-c059-49a5-86a4-a7a03cfe0300 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1967.068769] env[63379]: INFO nova.compute.manager [req-94f80809-ebd2-42ea-9d2e-945a8f8a9818 req-8649a80d-f06e-4ea9-84c9-93d990bb93bc service nova] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Neutron deleted interface 41bdc6f8-c059-49a5-86a4-a7a03cfe0300; detaching it from the instance and deleting it from the info cache [ 1967.068769] env[63379]: DEBUG nova.network.neutron [req-94f80809-ebd2-42ea-9d2e-945a8f8a9818 req-8649a80d-f06e-4ea9-84c9-93d990bb93bc service nova] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1967.409326] env[63379]: DEBUG nova.network.neutron [-] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1967.436016] env[63379]: DEBUG nova.scheduler.client.report [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1967.463357] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-841cf472-d1ae-4a19-be01-59c30b992b27 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.473608] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aa98f12-e88b-4ae5-a505-481fa1204627 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.502190] env[63379]: DEBUG nova.compute.manager [req-2b343a92-2322-49c4-a8e6-e6da313dc242 req-d915a12e-8c8b-4d4a-9d3b-ff8f174ed234 service nova] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Detach interface failed, port_id=d8f16458-adcd-4f5b-861b-e694816aa3d3, reason: Instance 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576 could not be found. 
{{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 1967.547259] env[63379]: DEBUG nova.network.neutron [-] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1967.572553] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-60592451-c5c9-46de-ac71-741cb2bf20dc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.581809] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d616ec8-8586-418e-aa49-68754abea9ac {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.611445] env[63379]: DEBUG nova.compute.manager [req-94f80809-ebd2-42ea-9d2e-945a8f8a9818 req-8649a80d-f06e-4ea9-84c9-93d990bb93bc service nova] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Detach interface failed, port_id=41bdc6f8-c059-49a5-86a4-a7a03cfe0300, reason: Instance d4988643-18ff-44c8-8363-e0de43da2abe could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 1967.848054] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e28f2865-9ad1-464a-85b4-e68d37a33898 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.867796] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Updating instance '1c983c16-6f86-4932-9698-7fb1428ca231' progress to 0 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1967.918822] env[63379]: INFO nova.compute.manager [-] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Took 1.34 seconds to deallocate network for instance. [ 1967.940691] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.216s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1967.941242] env[63379]: DEBUG nova.compute.manager [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1968.051567] env[63379]: INFO nova.compute.manager [-] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Took 1.64 seconds to deallocate network for instance. 
[ 1968.373733] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1968.374074] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bc02afb5-fe20-448e-ad57-8b509538b37d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.382080] env[63379]: DEBUG oslo_vmware.api [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1968.382080] env[63379]: value = "task-1780375" [ 1968.382080] env[63379]: _type = "Task" [ 1968.382080] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1968.390718] env[63379]: DEBUG oslo_vmware.api [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780375, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.424675] env[63379]: DEBUG oslo_concurrency.lockutils [None req-24731c3c-cd11-40d6-b96c-e47c5f4a6aa4 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1968.424902] env[63379]: DEBUG oslo_concurrency.lockutils [None req-24731c3c-cd11-40d6-b96c-e47c5f4a6aa4 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1968.425166] env[63379]: DEBUG nova.objects.instance [None req-24731c3c-cd11-40d6-b96c-e47c5f4a6aa4 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Lazy-loading 'resources' on Instance uuid 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1968.445554] env[63379]: DEBUG nova.compute.utils [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1968.448024] env[63379]: DEBUG nova.compute.manager [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1968.448024] env[63379]: DEBUG nova.network.neutron [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1968.494729] env[63379]: DEBUG nova.policy [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4daab3ae5955497a9d25b4ef59118d0e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ba1a1cf17f9941b299a6102689835f88', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1968.558733] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f1944bcc-083b-409f-8181-9d88113d1013 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1968.763077] env[63379]: DEBUG nova.network.neutron [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Successfully created port: 055e7bb8-6f06-4be0-bf3d-97113330ad89 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1968.892632] env[63379]: DEBUG oslo_vmware.api [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780375, 'name': PowerOffVM_Task, 'duration_secs': 0.205708} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1968.892903] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1968.893165] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Updating instance '1c983c16-6f86-4932-9698-7fb1428ca231' progress to 17 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1968.951963] env[63379]: DEBUG nova.compute.manager [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Start building block device mappings for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1969.082741] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83287cdf-e5a3-4960-8dd4-7c22ec590a9f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.091037] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-805a4415-466c-4e8c-8cb5-c3f9c09d3cbe {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.124358] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4ec50ad-d3a9-4253-a3bf-b24e95a8dcf3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.132350] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c74f720-43d5-4b71-9023-06c621623189 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.145616] env[63379]: DEBUG nova.compute.provider_tree [None req-24731c3c-cd11-40d6-b96c-e47c5f4a6aa4 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1969.399782] env[63379]: DEBUG nova.virt.hardware [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1969.400085] env[63379]: DEBUG nova.virt.hardware [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1969.400289] env[63379]: DEBUG nova.virt.hardware [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1969.400519] env[63379]: DEBUG nova.virt.hardware [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1969.400681] env[63379]: DEBUG nova.virt.hardware [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 
tempest-ServerActionsTestJSON-1145277061-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1969.400864] env[63379]: DEBUG nova.virt.hardware [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1969.401104] env[63379]: DEBUG nova.virt.hardware [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1969.401292] env[63379]: DEBUG nova.virt.hardware [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1969.401482] env[63379]: DEBUG nova.virt.hardware [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1969.401655] env[63379]: DEBUG nova.virt.hardware [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1969.401834] env[63379]: DEBUG nova.virt.hardware [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1969.407324] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bd192cde-0fc9-4331-ac8d-8afee0bdea29 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.425165] env[63379]: DEBUG oslo_vmware.api [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1969.425165] env[63379]: value = "task-1780376" [ 1969.425165] env[63379]: _type = "Task" [ 1969.425165] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1969.433214] env[63379]: DEBUG oslo_vmware.api [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780376, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1969.648796] env[63379]: DEBUG nova.scheduler.client.report [None req-24731c3c-cd11-40d6-b96c-e47c5f4a6aa4 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1969.936549] env[63379]: DEBUG oslo_vmware.api [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780376, 'name': ReconfigVM_Task, 'duration_secs': 0.171871} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1969.936871] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Updating instance '1c983c16-6f86-4932-9698-7fb1428ca231' progress to 33 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1969.963192] env[63379]: DEBUG nova.compute.manager [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1969.998269] env[63379]: DEBUG nova.virt.hardware [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1969.998628] env[63379]: DEBUG nova.virt.hardware [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1969.998859] env[63379]: DEBUG nova.virt.hardware [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1969.999152] env[63379]: DEBUG nova.virt.hardware [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1969.999370] env[63379]: DEBUG nova.virt.hardware [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1969.999583] env[63379]: DEBUG nova.virt.hardware [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1969.999887] env[63379]: DEBUG nova.virt.hardware [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1970.000142] env[63379]: DEBUG nova.virt.hardware [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1970.000390] env[63379]: DEBUG 
nova.virt.hardware [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1970.000640] env[63379]: DEBUG nova.virt.hardware [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1970.000907] env[63379]: DEBUG nova.virt.hardware [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1970.002244] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f9b0e11-9cd9-418f-bc6d-143d2818e4f4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.012434] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ae014dc-e874-4595-972d-d616e08df844 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.154897] env[63379]: DEBUG oslo_concurrency.lockutils [None req-24731c3c-cd11-40d6-b96c-e47c5f4a6aa4 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.729s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1970.158280] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f1944bcc-083b-409f-8181-9d88113d1013 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.598s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1970.158280] env[63379]: DEBUG nova.objects.instance [None req-f1944bcc-083b-409f-8181-9d88113d1013 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lazy-loading 'resources' on Instance uuid d4988643-18ff-44c8-8363-e0de43da2abe {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1970.185343] env[63379]: INFO nova.scheduler.client.report [None req-24731c3c-cd11-40d6-b96c-e47c5f4a6aa4 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Deleted allocations for instance 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576 [ 1970.225033] env[63379]: DEBUG nova.compute.manager [req-d40680ae-0d65-4b6a-b804-aa392bd09945 req-7fe29376-73bd-4647-99e3-c33af4c06d55 service nova] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Received event network-vif-plugged-055e7bb8-6f06-4be0-bf3d-97113330ad89 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1970.225274] env[63379]: DEBUG oslo_concurrency.lockutils [req-d40680ae-0d65-4b6a-b804-aa392bd09945 req-7fe29376-73bd-4647-99e3-c33af4c06d55 service 
nova] Acquiring lock "a39c5511-3efc-41e9-8902-692f237557e1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1970.225520] env[63379]: DEBUG oslo_concurrency.lockutils [req-d40680ae-0d65-4b6a-b804-aa392bd09945 req-7fe29376-73bd-4647-99e3-c33af4c06d55 service nova] Lock "a39c5511-3efc-41e9-8902-692f237557e1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1970.225659] env[63379]: DEBUG oslo_concurrency.lockutils [req-d40680ae-0d65-4b6a-b804-aa392bd09945 req-7fe29376-73bd-4647-99e3-c33af4c06d55 service nova] Lock "a39c5511-3efc-41e9-8902-692f237557e1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1970.225828] env[63379]: DEBUG nova.compute.manager [req-d40680ae-0d65-4b6a-b804-aa392bd09945 req-7fe29376-73bd-4647-99e3-c33af4c06d55 service nova] [instance: a39c5511-3efc-41e9-8902-692f237557e1] No waiting events found dispatching network-vif-plugged-055e7bb8-6f06-4be0-bf3d-97113330ad89 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1970.226010] env[63379]: WARNING nova.compute.manager [req-d40680ae-0d65-4b6a-b804-aa392bd09945 req-7fe29376-73bd-4647-99e3-c33af4c06d55 service nova] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Received unexpected event network-vif-plugged-055e7bb8-6f06-4be0-bf3d-97113330ad89 for instance with vm_state building and task_state spawning. 
[ 1970.286370] env[63379]: DEBUG nova.network.neutron [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Successfully updated port: 055e7bb8-6f06-4be0-bf3d-97113330ad89 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1970.444474] env[63379]: DEBUG nova.virt.hardware [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1970.444950] env[63379]: DEBUG nova.virt.hardware [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1970.445168] env[63379]: DEBUG nova.virt.hardware [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1970.445308] env[63379]: DEBUG nova.virt.hardware [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1970.445463] env[63379]: DEBUG nova.virt.hardware [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1970.445607] env[63379]: DEBUG nova.virt.hardware [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1970.445901] env[63379]: DEBUG nova.virt.hardware [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1970.446073] env[63379]: DEBUG nova.virt.hardware [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1970.446187] env[63379]: DEBUG nova.virt.hardware [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1970.446363] env[63379]: DEBUG nova.virt.hardware [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1970.446528] env[63379]: DEBUG nova.virt.hardware [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1970.452315] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Reconfiguring VM instance instance-0000006b to detach disk 2000 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1970.452653] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9c2402d3-bcbd-4742-92cd-08bf209b0ce4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.471905] env[63379]: DEBUG oslo_vmware.api [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1970.471905] env[63379]: value = "task-1780378" [ 1970.471905] env[63379]: _type = "Task" [ 1970.471905] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1970.480864] env[63379]: DEBUG oslo_vmware.api [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780378, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1970.695251] env[63379]: DEBUG oslo_concurrency.lockutils [None req-24731c3c-cd11-40d6-b96c-e47c5f4a6aa4 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Lock "2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.260s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1970.789348] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquiring lock "refresh_cache-a39c5511-3efc-41e9-8902-692f237557e1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1970.790036] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquired lock "refresh_cache-a39c5511-3efc-41e9-8902-692f237557e1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1970.790036] env[63379]: DEBUG nova.network.neutron [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1970.791534] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae4b08b2-66a6-46fd-bc7c-3873e5161f99 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.800922] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a32a19d7-05e0-45ae-aedc-0478a9edddbe {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.831611] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0c58815-78ae-40a8-8397-b0e28506f3af {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.839372] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-603bd917-4a46-4275-88fb-e921eb5fb8e7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.853249] env[63379]: DEBUG nova.compute.provider_tree [None req-f1944bcc-083b-409f-8181-9d88113d1013 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1970.985980] env[63379]: DEBUG oslo_vmware.api [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780378, 'name': ReconfigVM_Task, 'duration_secs': 0.198199} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1970.986297] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Reconfigured VM instance instance-0000006b to detach disk 2000 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1970.987108] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e008a1f-f4db-4710-bc2f-1014b27e412e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.009792] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] 1c983c16-6f86-4932-9698-7fb1428ca231/1c983c16-6f86-4932-9698-7fb1428ca231.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1971.010125] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6ebd575c-bc4d-4a86-891e-f5bece4cbcfc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.028077] env[63379]: DEBUG oslo_vmware.api [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1971.028077] env[63379]: value = "task-1780379" [ 1971.028077] env[63379]: _type = "Task" [ 1971.028077] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1971.036380] env[63379]: DEBUG oslo_vmware.api [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780379, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1971.323170] env[63379]: DEBUG nova.network.neutron [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Instance cache missing network info. 
{{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1971.356981] env[63379]: DEBUG nova.scheduler.client.report [None req-f1944bcc-083b-409f-8181-9d88113d1013 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1971.454773] env[63379]: DEBUG nova.network.neutron [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Updating instance_info_cache with network_info: [{"id": "055e7bb8-6f06-4be0-bf3d-97113330ad89", "address": "fa:16:3e:ef:83:58", "network": {"id": "2c6cbb4b-63db-4c84-91d3-63d6f68cfb71", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-740697972-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba1a1cf17f9941b299a6102689835f88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1195acd-707f-4bac-a99d-14db17a63802", "external-id": "nsx-vlan-transportzone-322", "segmentation_id": 322, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap055e7bb8-6f", "ovs_interfaceid": "055e7bb8-6f06-4be0-bf3d-97113330ad89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1971.538145] env[63379]: DEBUG oslo_vmware.api [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780379, 'name': ReconfigVM_Task, 'duration_secs': 0.31105} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1971.538468] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Reconfigured VM instance instance-0000006b to attach disk [datastore1] 1c983c16-6f86-4932-9698-7fb1428ca231/1c983c16-6f86-4932-9698-7fb1428ca231.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1971.538707] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Updating instance '1c983c16-6f86-4932-9698-7fb1428ca231' progress to 50 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1971.861377] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f1944bcc-083b-409f-8181-9d88113d1013 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.704s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1971.883890] env[63379]: INFO nova.scheduler.client.report [None req-f1944bcc-083b-409f-8181-9d88113d1013 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Deleted allocations for instance d4988643-18ff-44c8-8363-e0de43da2abe [ 1971.957176] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Releasing lock "refresh_cache-a39c5511-3efc-41e9-8902-692f237557e1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1971.957462] env[63379]: DEBUG nova.compute.manager [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Instance network_info: |[{"id": "055e7bb8-6f06-4be0-bf3d-97113330ad89", "address": "fa:16:3e:ef:83:58", "network": {"id": "2c6cbb4b-63db-4c84-91d3-63d6f68cfb71", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-740697972-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba1a1cf17f9941b299a6102689835f88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1195acd-707f-4bac-a99d-14db17a63802", "external-id": "nsx-vlan-transportzone-322", "segmentation_id": 322, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap055e7bb8-6f", "ovs_interfaceid": "055e7bb8-6f06-4be0-bf3d-97113330ad89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1971.958207] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ef:83:58', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c1195acd-707f-4bac-a99d-14db17a63802', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '055e7bb8-6f06-4be0-bf3d-97113330ad89', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1971.965624] env[63379]: DEBUG oslo.service.loopingcall [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1971.965855] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1971.966291] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b3281fa8-d28d-4e3a-9fe4-122b31d49175 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.986410] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1971.986410] env[63379]: value = "task-1780380" [ 1971.986410] env[63379]: _type = "Task" [ 1971.986410] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1971.994138] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780380, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1972.044967] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cd43b0e-659b-4fed-88e7-3ffe972d37fb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.067404] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b55c3b4e-f261-41b2-9caf-734442b943b6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.089760] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Updating instance '1c983c16-6f86-4932-9698-7fb1428ca231' progress to 67 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1972.255268] env[63379]: DEBUG nova.compute.manager [req-8997fc47-462c-4424-ad44-617a129f2dc5 req-f85c0415-b2c9-4d66-989d-7f508fb8ec0c service nova] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Received event network-changed-055e7bb8-6f06-4be0-bf3d-97113330ad89 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1972.255467] env[63379]: DEBUG nova.compute.manager [req-8997fc47-462c-4424-ad44-617a129f2dc5 req-f85c0415-b2c9-4d66-989d-7f508fb8ec0c service nova] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Refreshing instance network info cache due to event network-changed-055e7bb8-6f06-4be0-bf3d-97113330ad89. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1972.255691] env[63379]: DEBUG oslo_concurrency.lockutils [req-8997fc47-462c-4424-ad44-617a129f2dc5 req-f85c0415-b2c9-4d66-989d-7f508fb8ec0c service nova] Acquiring lock "refresh_cache-a39c5511-3efc-41e9-8902-692f237557e1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1972.255838] env[63379]: DEBUG oslo_concurrency.lockutils [req-8997fc47-462c-4424-ad44-617a129f2dc5 req-f85c0415-b2c9-4d66-989d-7f508fb8ec0c service nova] Acquired lock "refresh_cache-a39c5511-3efc-41e9-8902-692f237557e1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1972.256070] env[63379]: DEBUG nova.network.neutron [req-8997fc47-462c-4424-ad44-617a129f2dc5 req-f85c0415-b2c9-4d66-989d-7f508fb8ec0c service nova] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Refreshing network info cache for port 055e7bb8-6f06-4be0-bf3d-97113330ad89 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1972.391280] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f1944bcc-083b-409f-8181-9d88113d1013 tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "d4988643-18ff-44c8-8363-e0de43da2abe" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.236s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1972.496743] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780380, 'name': CreateVM_Task, 'duration_secs': 0.439388} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1972.496928] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1972.503853] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1972.504050] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1972.504432] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1972.504699] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a25871ff-9943-47b8-9b3b-760a16604668 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.511034] env[63379]: DEBUG oslo_vmware.api [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 1972.511034] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52094525-b699-cec0-cc68-a21502406d30" [ 1972.511034] env[63379]: _type = "Task" [ 1972.511034] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1972.518812] env[63379]: DEBUG oslo_vmware.api [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52094525-b699-cec0-cc68-a21502406d30, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1972.631257] env[63379]: DEBUG nova.network.neutron [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Port 62d6fce2-bf52-422e-8166-344c4fd61274 binding to destination host cpu-1 is already ACTIVE {{(pid=63379) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1972.824394] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d78a8d36-e0fb-4b38-be27-1ebdc4aaa4ad tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "48c17c3b-1197-46cb-a0f7-3671b2d82c7e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1972.824648] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d78a8d36-e0fb-4b38-be27-1ebdc4aaa4ad tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "48c17c3b-1197-46cb-a0f7-3671b2d82c7e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1972.824857] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d78a8d36-e0fb-4b38-be27-1ebdc4aaa4ad tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "48c17c3b-1197-46cb-a0f7-3671b2d82c7e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1972.825084] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d78a8d36-e0fb-4b38-be27-1ebdc4aaa4ad tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "48c17c3b-1197-46cb-a0f7-3671b2d82c7e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1972.825262] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d78a8d36-e0fb-4b38-be27-1ebdc4aaa4ad tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "48c17c3b-1197-46cb-a0f7-3671b2d82c7e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1972.827648] env[63379]: INFO nova.compute.manager [None req-d78a8d36-e0fb-4b38-be27-1ebdc4aaa4ad tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Terminating instance [ 1972.829536] env[63379]: DEBUG nova.compute.manager [None req-d78a8d36-e0fb-4b38-be27-1ebdc4aaa4ad tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1972.829747] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-d78a8d36-e0fb-4b38-be27-1ebdc4aaa4ad tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1972.831115] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63a38350-cf68-4c45-a793-88b8b60aac80 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.839379] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-d78a8d36-e0fb-4b38-be27-1ebdc4aaa4ad tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1972.841433] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0d6395ce-d304-4c16-9ab0-2a5db037c26f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.848738] env[63379]: DEBUG oslo_vmware.api [None req-d78a8d36-e0fb-4b38-be27-1ebdc4aaa4ad tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1972.848738] env[63379]: value = "task-1780381" [ 1972.848738] env[63379]: _type = "Task" [ 1972.848738] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1972.856956] env[63379]: DEBUG oslo_vmware.api [None req-d78a8d36-e0fb-4b38-be27-1ebdc4aaa4ad tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780381, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1972.979141] env[63379]: DEBUG nova.network.neutron [req-8997fc47-462c-4424-ad44-617a129f2dc5 req-f85c0415-b2c9-4d66-989d-7f508fb8ec0c service nova] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Updated VIF entry in instance network info cache for port 055e7bb8-6f06-4be0-bf3d-97113330ad89. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1972.979575] env[63379]: DEBUG nova.network.neutron [req-8997fc47-462c-4424-ad44-617a129f2dc5 req-f85c0415-b2c9-4d66-989d-7f508fb8ec0c service nova] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Updating instance_info_cache with network_info: [{"id": "055e7bb8-6f06-4be0-bf3d-97113330ad89", "address": "fa:16:3e:ef:83:58", "network": {"id": "2c6cbb4b-63db-4c84-91d3-63d6f68cfb71", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-740697972-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba1a1cf17f9941b299a6102689835f88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1195acd-707f-4bac-a99d-14db17a63802", "external-id": "nsx-vlan-transportzone-322", "segmentation_id": 322, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap055e7bb8-6f", "ovs_interfaceid": "055e7bb8-6f06-4be0-bf3d-97113330ad89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1973.021432] env[63379]: DEBUG oslo_vmware.api [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52094525-b699-cec0-cc68-a21502406d30, 'name': SearchDatastore_Task, 'duration_secs': 0.010934} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1973.021758] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1973.022009] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1973.022262] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1973.022418] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1973.022614] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1973.022878] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2e73b865-3014-4018-b159-6d370c9c4bc2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.032649] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1973.032833] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1973.033576] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba59c076-e9ac-44ea-934f-f53920d727e0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.038816] env[63379]: DEBUG oslo_vmware.api [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 1973.038816] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5260852f-133b-3379-fb2d-b3266df8a15d" [ 1973.038816] env[63379]: _type = "Task" [ 1973.038816] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1973.046915] env[63379]: DEBUG oslo_vmware.api [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5260852f-133b-3379-fb2d-b3266df8a15d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1973.359072] env[63379]: DEBUG oslo_vmware.api [None req-d78a8d36-e0fb-4b38-be27-1ebdc4aaa4ad tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780381, 'name': PowerOffVM_Task, 'duration_secs': 0.179523} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1973.359358] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-d78a8d36-e0fb-4b38-be27-1ebdc4aaa4ad tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1973.359530] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-d78a8d36-e0fb-4b38-be27-1ebdc4aaa4ad tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1973.359780] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8cb78e2a-282c-4a56-a704-77fbdd97110a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.482539] env[63379]: DEBUG oslo_concurrency.lockutils [req-8997fc47-462c-4424-ad44-617a129f2dc5 req-f85c0415-b2c9-4d66-989d-7f508fb8ec0c service nova] Releasing lock "refresh_cache-a39c5511-3efc-41e9-8902-692f237557e1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1973.517046] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-d78a8d36-e0fb-4b38-be27-1ebdc4aaa4ad tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1973.517046] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-d78a8d36-e0fb-4b38-be27-1ebdc4aaa4ad 
tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1973.517046] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-d78a8d36-e0fb-4b38-be27-1ebdc4aaa4ad tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Deleting the datastore file [datastore1] 48c17c3b-1197-46cb-a0f7-3671b2d82c7e {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1973.517324] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5139768a-d901-474e-9980-f3126eaa323f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.524245] env[63379]: DEBUG oslo_vmware.api [None req-d78a8d36-e0fb-4b38-be27-1ebdc4aaa4ad tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for the task: (returnval){ [ 1973.524245] env[63379]: value = "task-1780383" [ 1973.524245] env[63379]: _type = "Task" [ 1973.524245] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1973.532987] env[63379]: DEBUG oslo_vmware.api [None req-d78a8d36-e0fb-4b38-be27-1ebdc4aaa4ad tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780383, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1973.548319] env[63379]: DEBUG oslo_vmware.api [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5260852f-133b-3379-fb2d-b3266df8a15d, 'name': SearchDatastore_Task, 'duration_secs': 0.00949} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1973.549315] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a0034154-22b9-447b-a771-10caedec68cd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.555062] env[63379]: DEBUG oslo_vmware.api [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 1973.555062] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5268d708-dd67-1d6c-e897-15c12430c07f" [ 1973.555062] env[63379]: _type = "Task" [ 1973.555062] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1973.563255] env[63379]: DEBUG oslo_vmware.api [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5268d708-dd67-1d6c-e897-15c12430c07f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1973.654479] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "1c983c16-6f86-4932-9698-7fb1428ca231-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1973.654776] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "1c983c16-6f86-4932-9698-7fb1428ca231-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1973.655014] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "1c983c16-6f86-4932-9698-7fb1428ca231-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1973.976319] env[63379]: DEBUG oslo_concurrency.lockutils [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "8b9f070e-11d3-4e2d-a0ce-54bb939a36ff" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1973.976619] env[63379]: DEBUG oslo_concurrency.lockutils [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "8b9f070e-11d3-4e2d-a0ce-54bb939a36ff" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1974.034525] env[63379]: DEBUG oslo_vmware.api [None req-d78a8d36-e0fb-4b38-be27-1ebdc4aaa4ad tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Task: {'id': task-1780383, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.174379} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1974.034801] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-d78a8d36-e0fb-4b38-be27-1ebdc4aaa4ad tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1974.034993] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-d78a8d36-e0fb-4b38-be27-1ebdc4aaa4ad tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1974.035208] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-d78a8d36-e0fb-4b38-be27-1ebdc4aaa4ad tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1974.035387] env[63379]: INFO nova.compute.manager [None req-d78a8d36-e0fb-4b38-be27-1ebdc4aaa4ad tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Took 1.21 seconds to destroy the instance on the hypervisor. [ 1974.035632] env[63379]: DEBUG oslo.service.loopingcall [None req-d78a8d36-e0fb-4b38-be27-1ebdc4aaa4ad tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1974.035824] env[63379]: DEBUG nova.compute.manager [-] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1974.035920] env[63379]: DEBUG nova.network.neutron [-] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1974.065304] env[63379]: DEBUG oslo_vmware.api [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5268d708-dd67-1d6c-e897-15c12430c07f, 'name': SearchDatastore_Task, 'duration_secs': 0.009531} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1974.067459] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1974.067722] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] a39c5511-3efc-41e9-8902-692f237557e1/a39c5511-3efc-41e9-8902-692f237557e1.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1974.068037] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1078a4e8-ebbb-4259-ba9d-2fdc041b949c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.076947] env[63379]: DEBUG oslo_vmware.api [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 1974.076947] env[63379]: value = "task-1780384" [ 1974.076947] env[63379]: _type = "Task" [ 1974.076947] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1974.085980] env[63379]: DEBUG oslo_vmware.api [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780384, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1974.337745] env[63379]: DEBUG nova.compute.manager [req-eca44410-fd0a-4b89-ab6b-62ec49632c0b req-8fcd57a7-64a7-4da7-b0d5-bfa6e5fc4216 service nova] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Received event network-vif-deleted-4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1974.337967] env[63379]: INFO nova.compute.manager [req-eca44410-fd0a-4b89-ab6b-62ec49632c0b req-8fcd57a7-64a7-4da7-b0d5-bfa6e5fc4216 service nova] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Neutron deleted interface 4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191; detaching it from the instance and deleting it from the info cache [ 1974.338256] env[63379]: DEBUG nova.network.neutron [req-eca44410-fd0a-4b89-ab6b-62ec49632c0b req-8fcd57a7-64a7-4da7-b0d5-bfa6e5fc4216 service nova] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1974.479213] env[63379]: DEBUG nova.compute.manager [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Starting instance... 
{{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1974.589464] env[63379]: DEBUG oslo_vmware.api [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780384, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1974.693019] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "refresh_cache-1c983c16-6f86-4932-9698-7fb1428ca231" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1974.693276] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquired lock "refresh_cache-1c983c16-6f86-4932-9698-7fb1428ca231" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1974.693518] env[63379]: DEBUG nova.network.neutron [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1974.813989] env[63379]: DEBUG nova.network.neutron [-] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1974.841354] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-93958819-cc33-4bef-850f-67254e222971 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.852100] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f15285ec-f92e-4f35-af70-04956cd35640 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.882380] env[63379]: DEBUG nova.compute.manager [req-eca44410-fd0a-4b89-ab6b-62ec49632c0b req-8fcd57a7-64a7-4da7-b0d5-bfa6e5fc4216 service nova] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Detach interface failed, port_id=4b8a4c15-2708-4acd-bcf2-ad9ef2ea4191, reason: Instance 48c17c3b-1197-46cb-a0f7-3671b2d82c7e could not be found. 
{{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 1975.005019] env[63379]: DEBUG oslo_concurrency.lockutils [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1975.005167] env[63379]: DEBUG oslo_concurrency.lockutils [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1975.006796] env[63379]: INFO nova.compute.claims [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1975.089491] env[63379]: DEBUG oslo_vmware.api [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780384, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.73073} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1975.089812] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] a39c5511-3efc-41e9-8902-692f237557e1/a39c5511-3efc-41e9-8902-692f237557e1.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1975.090058] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1975.090345] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-59798fb5-67b6-4710-aced-3382afc93287 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.097954] env[63379]: DEBUG oslo_vmware.api [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 1975.097954] env[63379]: value = "task-1780385" [ 1975.097954] env[63379]: _type = "Task" [ 1975.097954] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1975.107702] env[63379]: DEBUG oslo_vmware.api [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780385, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1975.319212] env[63379]: INFO nova.compute.manager [-] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Took 1.28 seconds to deallocate network for instance. [ 1975.608949] env[63379]: DEBUG oslo_vmware.api [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780385, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066108} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1975.609342] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1975.610091] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60652e34-3e71-4307-8b33-a0defef5b9cb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.633188] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Reconfiguring VM instance instance-00000070 to attach disk [datastore1] a39c5511-3efc-41e9-8902-692f237557e1/a39c5511-3efc-41e9-8902-692f237557e1.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1975.633456] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a20c4609-fb51-4e16-929d-cd42bb8130e6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.657489] env[63379]: DEBUG oslo_vmware.api [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 1975.657489] env[63379]: value = "task-1780386" [ 1975.657489] env[63379]: _type = "Task" [ 1975.657489] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1975.664604] env[63379]: DEBUG oslo_vmware.api [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780386, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1975.684771] env[63379]: DEBUG nova.network.neutron [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Updating instance_info_cache with network_info: [{"id": "62d6fce2-bf52-422e-8166-344c4fd61274", "address": "fa:16:3e:fe:3b:ec", "network": {"id": "c67e6fb1-ba3e-4494-b459-ecd555f3bf64", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1864563188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.212", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c01c5c8c3734c4ea066324e542e7374", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6934071-bf85-4591-9c7d-55c7ea131262", "external-id": "nsx-vlan-transportzone-452", "segmentation_id": 452, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62d6fce2-bf", "ovs_interfaceid": "62d6fce2-bf52-422e-8166-344c4fd61274", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1975.826052] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d78a8d36-e0fb-4b38-be27-1ebdc4aaa4ad tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1976.129592] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-979c4829-e61a-4639-a348-dbf7434cb258 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.137425] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-465020b3-27e7-42bb-9d83-a1260b6dda18 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.170740] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b47c8021-e2ba-4349-87a4-b07774719df4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.181721] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfdf49d2-f23f-4690-9274-acca38139ab5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.185512] env[63379]: DEBUG oslo_vmware.api [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780386, 'name': ReconfigVM_Task, 'duration_secs': 0.349841} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1976.185794] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Reconfigured VM instance instance-00000070 to attach disk [datastore1] a39c5511-3efc-41e9-8902-692f237557e1/a39c5511-3efc-41e9-8902-692f237557e1.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1976.186751] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a970c456-70e4-4f38-bc8a-182564bc16a9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.188484] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Releasing lock "refresh_cache-1c983c16-6f86-4932-9698-7fb1428ca231" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1976.202051] env[63379]: DEBUG nova.compute.provider_tree [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1976.206970] env[63379]: DEBUG oslo_vmware.api [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 1976.206970] env[63379]: value = "task-1780387" [ 1976.206970] env[63379]: _type = "Task" [ 1976.206970] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1976.215183] env[63379]: DEBUG oslo_vmware.api [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780387, 'name': Rename_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1976.721686] env[63379]: DEBUG oslo_vmware.api [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780387, 'name': Rename_Task, 'duration_secs': 0.153059} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1976.721686] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1976.722471] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2f9e7736-a509-4177-b5ed-ea68fb17eab4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.724423] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56d2b928-669f-4668-a54b-fa407fb9c826 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.728878] env[63379]: ERROR nova.scheduler.client.report [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [req-64d9c234-1a23-45c8-8b63-913e6af57a6e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID cf478c89-515f-4372-b90f-4868ab56e978. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-64d9c234-1a23-45c8-8b63-913e6af57a6e"}]} [ 1976.745693] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bf9d80f-b799-4e8e-aa2f-407eec1ab180 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.748657] env[63379]: DEBUG oslo_vmware.api [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 1976.748657] env[63379]: value = "task-1780388" [ 1976.748657] env[63379]: _type = "Task" [ 1976.748657] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1976.755432] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Updating instance '1c983c16-6f86-4932-9698-7fb1428ca231' progress to 83 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1976.759743] env[63379]: DEBUG nova.scheduler.client.report [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Refreshing inventories for resource provider cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1976.765043] env[63379]: DEBUG oslo_vmware.api [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780388, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1976.778237] env[63379]: DEBUG nova.scheduler.client.report [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Updating ProviderTree inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1976.779485] env[63379]: DEBUG nova.compute.provider_tree [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1976.794242] env[63379]: DEBUG nova.scheduler.client.report [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Refreshing aggregate associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, aggregates: None {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1976.814753] env[63379]: DEBUG nova.scheduler.client.report [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Refreshing trait associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, traits: 
HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1976.936559] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f1bd816-811f-4cc5-936e-8b5d55b0ba48 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.945180] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9128dc8-a70f-41fb-8f6b-b8b06b9abed0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.976904] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e8667dc-a6ec-4eb8-8563-8b53c68fbda3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.985081] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-846cdc5f-ef60-4b5c-925d-570184954ba7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.998992] env[63379]: DEBUG nova.compute.provider_tree [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1977.162269] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f4966bc8-a3c4-43d0-a106-2777a4a2727a tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "df8d513d-c201-4ffe-894e-cf8c3318cecc" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1977.162520] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f4966bc8-a3c4-43d0-a106-2777a4a2727a tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "df8d513d-c201-4ffe-894e-cf8c3318cecc" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1977.259653] env[63379]: DEBUG oslo_vmware.api [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780388, 'name': PowerOnVM_Task, 'duration_secs': 0.488753} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1977.259915] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1977.260161] env[63379]: INFO nova.compute.manager [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Took 7.30 seconds to spawn the instance on the hypervisor. [ 1977.260379] env[63379]: DEBUG nova.compute.manager [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1977.261174] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d9aa32b-5edf-407e-896c-9ac75397053a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.268788] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1977.271872] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-de9ce76b-849b-4b9d-bc4f-496a30a6a84a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.279024] env[63379]: DEBUG oslo_vmware.api [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1977.279024] env[63379]: value = "task-1780389" [ 1977.279024] env[63379]: _type = "Task" [ 1977.279024] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1977.286957] env[63379]: DEBUG oslo_vmware.api [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780389, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1977.527902] env[63379]: DEBUG nova.scheduler.client.report [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Updated inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 with generation 158 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1977.528237] env[63379]: DEBUG nova.compute.provider_tree [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Updating resource provider cf478c89-515f-4372-b90f-4868ab56e978 generation from 158 to 159 during operation: update_inventory {{(pid=63379) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1977.528433] env[63379]: DEBUG nova.compute.provider_tree [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1977.666428] env[63379]: DEBUG nova.compute.utils [None req-f4966bc8-a3c4-43d0-a106-2777a4a2727a tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1977.784843] env[63379]: INFO nova.compute.manager [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Took 12.08 seconds to build instance. [ 1977.791780] env[63379]: DEBUG oslo_vmware.api [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780389, 'name': PowerOnVM_Task, 'duration_secs': 0.39126} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1977.792110] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1977.792357] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cc8ea052-df52-416f-b42c-ab72a16de584 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Updating instance '1c983c16-6f86-4932-9698-7fb1428ca231' progress to 100 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1978.033023] env[63379]: DEBUG oslo_concurrency.lockutils [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.028s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1978.033498] env[63379]: DEBUG nova.compute.manager [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1978.036404] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d78a8d36-e0fb-4b38-be27-1ebdc4aaa4ad tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.211s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1978.036651] env[63379]: DEBUG nova.objects.instance [None req-d78a8d36-e0fb-4b38-be27-1ebdc4aaa4ad tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lazy-loading 'resources' on Instance uuid 48c17c3b-1197-46cb-a0f7-3671b2d82c7e {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1978.168727] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f4966bc8-a3c4-43d0-a106-2777a4a2727a tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "df8d513d-c201-4ffe-894e-cf8c3318cecc" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1978.287759] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5fbd3280-154a-4b6a-90fd-b30a9997aa03 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "a39c5511-3efc-41e9-8902-692f237557e1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.589s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1978.541299] env[63379]: DEBUG nova.compute.utils [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e 
tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1978.545503] env[63379]: DEBUG nova.compute.manager [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1978.545709] env[63379]: DEBUG nova.network.neutron [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1978.593214] env[63379]: DEBUG nova.policy [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '756ff556130a4855b461899fece1e1fa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a3363a90de2d4d5988ddd03974c10d0a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1978.666970] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61b56e4f-cab2-43b6-b4ee-8d4bf5ecfb9a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.675970] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0932f47-3735-459b-b785-4ce36743164d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.709021] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0ea5fa7-7d4d-472b-ab0b-e0f3dcb7df9f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.716944] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66d2ecd2-9bc5-443f-938c-c7f78c4727c5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.732127] env[63379]: DEBUG nova.compute.provider_tree [None req-d78a8d36-e0fb-4b38-be27-1ebdc4aaa4ad tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1978.888168] env[63379]: DEBUG nova.network.neutron [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Successfully created port: 05160396-15ed-49fa-b2de-3793f1f45863 {{(pid=63379) _create_port_minimal 
/opt/stack/nova/nova/network/neutron.py:548}} [ 1979.049068] env[63379]: DEBUG nova.compute.manager [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1979.235088] env[63379]: DEBUG nova.scheduler.client.report [None req-d78a8d36-e0fb-4b38-be27-1ebdc4aaa4ad tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1979.246499] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f4966bc8-a3c4-43d0-a106-2777a4a2727a tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "df8d513d-c201-4ffe-894e-cf8c3318cecc" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1979.246771] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f4966bc8-a3c4-43d0-a106-2777a4a2727a tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "df8d513d-c201-4ffe-894e-cf8c3318cecc" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1979.247017] env[63379]: INFO nova.compute.manager [None req-f4966bc8-a3c4-43d0-a106-2777a4a2727a tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Attaching volume 6a4d7edc-98d8-414f-9347-350d38aa7ea4 to /dev/sdb [ 1979.291040] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8569c8f-b0f0-41fd-ad34-831dc799acd9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.299445] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba858820-949a-47fa-92ac-dc2110996d66 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.321547] env[63379]: DEBUG nova.virt.block_device [None req-f4966bc8-a3c4-43d0-a106-2777a4a2727a tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Updating existing volume attachment record: d1d7672e-dc22-4a5a-b7dd-fc6ba69665c1 {{(pid=63379) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1979.362860] env[63379]: DEBUG nova.compute.manager [req-96640384-008d-4c77-b962-86cebeed24f4 req-0cea8e54-53cb-45d3-b10e-6a9b362ed89a service nova] [instance: 
a39c5511-3efc-41e9-8902-692f237557e1] Received event network-changed-055e7bb8-6f06-4be0-bf3d-97113330ad89 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1979.363091] env[63379]: DEBUG nova.compute.manager [req-96640384-008d-4c77-b962-86cebeed24f4 req-0cea8e54-53cb-45d3-b10e-6a9b362ed89a service nova] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Refreshing instance network info cache due to event network-changed-055e7bb8-6f06-4be0-bf3d-97113330ad89. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1979.363420] env[63379]: DEBUG oslo_concurrency.lockutils [req-96640384-008d-4c77-b962-86cebeed24f4 req-0cea8e54-53cb-45d3-b10e-6a9b362ed89a service nova] Acquiring lock "refresh_cache-a39c5511-3efc-41e9-8902-692f237557e1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1979.363491] env[63379]: DEBUG oslo_concurrency.lockutils [req-96640384-008d-4c77-b962-86cebeed24f4 req-0cea8e54-53cb-45d3-b10e-6a9b362ed89a service nova] Acquired lock "refresh_cache-a39c5511-3efc-41e9-8902-692f237557e1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1979.363622] env[63379]: DEBUG nova.network.neutron [req-96640384-008d-4c77-b962-86cebeed24f4 req-0cea8e54-53cb-45d3-b10e-6a9b362ed89a service nova] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Refreshing network info cache for port 055e7bb8-6f06-4be0-bf3d-97113330ad89 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1979.554580] env[63379]: INFO nova.virt.block_device [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Booting with volume eeee8c0e-4fab-40d1-86c6-51050b04b159 at /dev/sda [ 1979.591934] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f61c61b7-0e7e-4a98-9c1e-c6d99b32ccab {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.605343] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a12a111-f5ac-436b-883d-839ecf72db8b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.636601] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1854b8f4-0a8b-4300-8b89-760f931a9a95 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.647161] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f19208bd-d7ef-414d-b96f-308fded9a6bf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.686028] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-560f1278-e204-45a6-a4e9-8229d5b5e8ea {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.694150] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2901b39-0183-4011-8ea2-21bbc3af5489 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.712155] 
env[63379]: DEBUG nova.virt.block_device [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Updating existing volume attachment record: dc2832ba-4e12-4107-8e16-b7c2d353696e {{(pid=63379) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1979.740069] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d78a8d36-e0fb-4b38-be27-1ebdc4aaa4ad tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.703s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1979.765127] env[63379]: INFO nova.scheduler.client.report [None req-d78a8d36-e0fb-4b38-be27-1ebdc4aaa4ad tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Deleted allocations for instance 48c17c3b-1197-46cb-a0f7-3671b2d82c7e [ 1980.007579] env[63379]: DEBUG nova.network.neutron [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Port 62d6fce2-bf52-422e-8166-344c4fd61274 binding to destination host cpu-1 is already ACTIVE {{(pid=63379) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1980.007982] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "refresh_cache-1c983c16-6f86-4932-9698-7fb1428ca231" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1980.008559] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquired lock "refresh_cache-1c983c16-6f86-4932-9698-7fb1428ca231" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1980.008771] env[63379]: DEBUG nova.network.neutron [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1980.188210] env[63379]: DEBUG nova.network.neutron [req-96640384-008d-4c77-b962-86cebeed24f4 req-0cea8e54-53cb-45d3-b10e-6a9b362ed89a service nova] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Updated VIF entry in instance network info cache for port 055e7bb8-6f06-4be0-bf3d-97113330ad89. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1980.188606] env[63379]: DEBUG nova.network.neutron [req-96640384-008d-4c77-b962-86cebeed24f4 req-0cea8e54-53cb-45d3-b10e-6a9b362ed89a service nova] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Updating instance_info_cache with network_info: [{"id": "055e7bb8-6f06-4be0-bf3d-97113330ad89", "address": "fa:16:3e:ef:83:58", "network": {"id": "2c6cbb4b-63db-4c84-91d3-63d6f68cfb71", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-740697972-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba1a1cf17f9941b299a6102689835f88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1195acd-707f-4bac-a99d-14db17a63802", "external-id": "nsx-vlan-transportzone-322", "segmentation_id": 322, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap055e7bb8-6f", "ovs_interfaceid": "055e7bb8-6f06-4be0-bf3d-97113330ad89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1980.275851] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d78a8d36-e0fb-4b38-be27-1ebdc4aaa4ad tempest-AttachInterfacesTestJSON-924600923 tempest-AttachInterfacesTestJSON-924600923-project-member] Lock "48c17c3b-1197-46cb-a0f7-3671b2d82c7e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.451s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1980.664991] env[63379]: DEBUG nova.compute.manager [req-b58e604d-79f7-43c0-97ba-7227726759c4 req-77941c36-f9de-4a6b-8b07-2c7a2089ff83 service nova] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Received event network-vif-plugged-05160396-15ed-49fa-b2de-3793f1f45863 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1980.665269] env[63379]: DEBUG oslo_concurrency.lockutils [req-b58e604d-79f7-43c0-97ba-7227726759c4 req-77941c36-f9de-4a6b-8b07-2c7a2089ff83 service nova] Acquiring lock "8b9f070e-11d3-4e2d-a0ce-54bb939a36ff-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1980.665567] env[63379]: DEBUG oslo_concurrency.lockutils [req-b58e604d-79f7-43c0-97ba-7227726759c4 req-77941c36-f9de-4a6b-8b07-2c7a2089ff83 service nova] Lock "8b9f070e-11d3-4e2d-a0ce-54bb939a36ff-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1980.665917] env[63379]: DEBUG oslo_concurrency.lockutils [req-b58e604d-79f7-43c0-97ba-7227726759c4 req-77941c36-f9de-4a6b-8b07-2c7a2089ff83 service nova] Lock "8b9f070e-11d3-4e2d-a0ce-54bb939a36ff-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1980.665979] env[63379]: DEBUG nova.compute.manager [req-b58e604d-79f7-43c0-97ba-7227726759c4 req-77941c36-f9de-4a6b-8b07-2c7a2089ff83 service nova] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] No waiting events found dispatching network-vif-plugged-05160396-15ed-49fa-b2de-3793f1f45863 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1980.666289] env[63379]: WARNING nova.compute.manager [req-b58e604d-79f7-43c0-97ba-7227726759c4 req-77941c36-f9de-4a6b-8b07-2c7a2089ff83 service nova] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Received unexpected event network-vif-plugged-05160396-15ed-49fa-b2de-3793f1f45863 for instance with vm_state building and task_state block_device_mapping. [ 1980.691528] env[63379]: DEBUG oslo_concurrency.lockutils [req-96640384-008d-4c77-b962-86cebeed24f4 req-0cea8e54-53cb-45d3-b10e-6a9b362ed89a service nova] Releasing lock "refresh_cache-a39c5511-3efc-41e9-8902-692f237557e1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1980.789838] env[63379]: DEBUG nova.network.neutron [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Successfully updated port: 05160396-15ed-49fa-b2de-3793f1f45863 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1980.814057] env[63379]: DEBUG nova.network.neutron [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Updating instance_info_cache with network_info: [{"id": "62d6fce2-bf52-422e-8166-344c4fd61274", "address": "fa:16:3e:fe:3b:ec", "network": {"id": "c67e6fb1-ba3e-4494-b459-ecd555f3bf64", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1864563188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.212", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c01c5c8c3734c4ea066324e542e7374", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6934071-bf85-4591-9c7d-55c7ea131262", "external-id": "nsx-vlan-transportzone-452", "segmentation_id": 452, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62d6fce2-bf", "ovs_interfaceid": "62d6fce2-bf52-422e-8166-344c4fd61274", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1981.292788] env[63379]: DEBUG oslo_concurrency.lockutils [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock 
"refresh_cache-8b9f070e-11d3-4e2d-a0ce-54bb939a36ff" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1981.293120] env[63379]: DEBUG oslo_concurrency.lockutils [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquired lock "refresh_cache-8b9f070e-11d3-4e2d-a0ce-54bb939a36ff" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1981.293120] env[63379]: DEBUG nova.network.neutron [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1981.315537] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Releasing lock "refresh_cache-1c983c16-6f86-4932-9698-7fb1428ca231" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1981.814714] env[63379]: DEBUG nova.compute.manager [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1981.815287] env[63379]: DEBUG nova.virt.hardware [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1981.815511] env[63379]: DEBUG nova.virt.hardware [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1981.815671] env[63379]: DEBUG nova.virt.hardware [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1981.815857] env[63379]: DEBUG nova.virt.hardware [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1981.816024] env[63379]: DEBUG nova.virt.hardware [None 
req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1981.816181] env[63379]: DEBUG nova.virt.hardware [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1981.816395] env[63379]: DEBUG nova.virt.hardware [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1981.816559] env[63379]: DEBUG nova.virt.hardware [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1981.816815] env[63379]: DEBUG nova.virt.hardware [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1981.817034] env[63379]: DEBUG nova.virt.hardware [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1981.817240] env[63379]: DEBUG nova.virt.hardware [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1981.818260] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb895447-c79d-45cb-9e3d-e3e8f1857b9e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.821984] env[63379]: DEBUG nova.compute.manager [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=63379) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:898}} [ 1981.821984] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1981.822220] env[63379]: DEBUG 
oslo_concurrency.lockutils [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1981.829958] env[63379]: DEBUG nova.network.neutron [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1981.834324] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7ecf962-731f-4e10-b2f4-232eb560df48 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.975474] env[63379]: DEBUG nova.network.neutron [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Updating instance_info_cache with network_info: [{"id": "05160396-15ed-49fa-b2de-3793f1f45863", "address": "fa:16:3e:b8:60:62", "network": {"id": "867cf8d8-4bba-4306-ad6d-632c9dc6863d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-777715300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a3363a90de2d4d5988ddd03974c10d0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "459b8c74-0aa6-42b6-996a-42b1c5d7e5c6", "external-id": "nsx-vlan-transportzone-467", "segmentation_id": 467, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05160396-15", "ovs_interfaceid": "05160396-15ed-49fa-b2de-3793f1f45863", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1982.327623] env[63379]: DEBUG nova.objects.instance [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lazy-loading 'migration_context' on Instance uuid 1c983c16-6f86-4932-9698-7fb1428ca231 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1982.480542] env[63379]: DEBUG oslo_concurrency.lockutils [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Releasing lock "refresh_cache-8b9f070e-11d3-4e2d-a0ce-54bb939a36ff" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1982.480887] env[63379]: DEBUG nova.compute.manager [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 
tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Instance network_info: |[{"id": "05160396-15ed-49fa-b2de-3793f1f45863", "address": "fa:16:3e:b8:60:62", "network": {"id": "867cf8d8-4bba-4306-ad6d-632c9dc6863d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-777715300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a3363a90de2d4d5988ddd03974c10d0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "459b8c74-0aa6-42b6-996a-42b1c5d7e5c6", "external-id": "nsx-vlan-transportzone-467", "segmentation_id": 467, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05160396-15", "ovs_interfaceid": "05160396-15ed-49fa-b2de-3793f1f45863", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1982.481361] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b8:60:62', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '459b8c74-0aa6-42b6-996a-42b1c5d7e5c6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '05160396-15ed-49fa-b2de-3793f1f45863', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1982.488872] env[63379]: DEBUG oslo.service.loopingcall [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1982.489123] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1982.489399] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-506a54a6-c0b7-408f-8115-3b80304e9ded {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.511626] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1982.511626] env[63379]: value = "task-1780394" [ 1982.511626] env[63379]: _type = "Task" [ 1982.511626] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1982.519532] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780394, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1982.692661] env[63379]: DEBUG nova.compute.manager [req-36532b58-bec0-41c0-84b1-d4f6e88cd15c req-e437ed4d-34eb-4f22-883c-c75a1b1565c2 service nova] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Received event network-changed-05160396-15ed-49fa-b2de-3793f1f45863 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1982.692934] env[63379]: DEBUG nova.compute.manager [req-36532b58-bec0-41c0-84b1-d4f6e88cd15c req-e437ed4d-34eb-4f22-883c-c75a1b1565c2 service nova] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Refreshing instance network info cache due to event network-changed-05160396-15ed-49fa-b2de-3793f1f45863. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1982.693268] env[63379]: DEBUG oslo_concurrency.lockutils [req-36532b58-bec0-41c0-84b1-d4f6e88cd15c req-e437ed4d-34eb-4f22-883c-c75a1b1565c2 service nova] Acquiring lock "refresh_cache-8b9f070e-11d3-4e2d-a0ce-54bb939a36ff" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1982.693499] env[63379]: DEBUG oslo_concurrency.lockutils [req-36532b58-bec0-41c0-84b1-d4f6e88cd15c req-e437ed4d-34eb-4f22-883c-c75a1b1565c2 service nova] Acquired lock "refresh_cache-8b9f070e-11d3-4e2d-a0ce-54bb939a36ff" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1982.693738] env[63379]: DEBUG nova.network.neutron [req-36532b58-bec0-41c0-84b1-d4f6e88cd15c req-e437ed4d-34eb-4f22-883c-c75a1b1565c2 service nova] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Refreshing network info cache for port 05160396-15ed-49fa-b2de-3793f1f45863 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1982.956213] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42d7bb50-4ed9-45e7-8ff5-b7ea9f03f3b7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.965535] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb5bd3c1-10c6-46e7-aadb-8269bd7a9e32 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.999855] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d87e8c6-7dea-485b-9591-b4959bbbc07e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.010963] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54cf2956-b3ea-43a7-bb83-9a7c6dd847e8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.025595] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780394, 'name': CreateVM_Task, 'duration_secs': 0.347842} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1983.033407] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1983.034031] env[63379]: DEBUG nova.compute.provider_tree [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1983.036513] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'guest_format': None, 'device_type': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369511', 'volume_id': 'eeee8c0e-4fab-40d1-86c6-51050b04b159', 'name': 'volume-eeee8c0e-4fab-40d1-86c6-51050b04b159', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '8b9f070e-11d3-4e2d-a0ce-54bb939a36ff', 'attached_at': '', 'detached_at': '', 'volume_id': 'eeee8c0e-4fab-40d1-86c6-51050b04b159', 'serial': 'eeee8c0e-4fab-40d1-86c6-51050b04b159'}, 'attachment_id': 'dc2832ba-4e12-4107-8e16-b7c2d353696e', 'boot_index': 0, 'mount_device': '/dev/sda', 'disk_bus': None, 'delete_on_termination': True, 'volume_type': None}], 'swap': None} {{(pid=63379) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1983.036753] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Root volume attach. 
Driver type: vmdk {{(pid=63379) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1983.037589] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d733aea3-5980-4edf-ae8b-96463c867cd4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.047554] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6d122d2-0c9a-45f4-8348-4d362e3a1241 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.055180] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f8d3ece-aa96-4427-95f4-569c63f0ad16 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.068133] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-3b50b904-8f2a-4eb9-8cee-07854f068d0d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.084220] env[63379]: DEBUG oslo_vmware.api [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1983.084220] env[63379]: value = "task-1780395" [ 1983.084220] env[63379]: _type = "Task" [ 1983.084220] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1983.095871] env[63379]: DEBUG oslo_vmware.api [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780395, 'name': RelocateVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1983.473126] env[63379]: DEBUG nova.network.neutron [req-36532b58-bec0-41c0-84b1-d4f6e88cd15c req-e437ed4d-34eb-4f22-883c-c75a1b1565c2 service nova] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Updated VIF entry in instance network info cache for port 05160396-15ed-49fa-b2de-3793f1f45863. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1983.473511] env[63379]: DEBUG nova.network.neutron [req-36532b58-bec0-41c0-84b1-d4f6e88cd15c req-e437ed4d-34eb-4f22-883c-c75a1b1565c2 service nova] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Updating instance_info_cache with network_info: [{"id": "05160396-15ed-49fa-b2de-3793f1f45863", "address": "fa:16:3e:b8:60:62", "network": {"id": "867cf8d8-4bba-4306-ad6d-632c9dc6863d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-777715300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a3363a90de2d4d5988ddd03974c10d0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "459b8c74-0aa6-42b6-996a-42b1c5d7e5c6", "external-id": "nsx-vlan-transportzone-467", "segmentation_id": 467, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05160396-15", "ovs_interfaceid": "05160396-15ed-49fa-b2de-3793f1f45863", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1983.537342] env[63379]: DEBUG nova.scheduler.client.report [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1983.577115] env[63379]: DEBUG oslo_concurrency.lockutils [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Acquiring lock "10fc842d-b821-4103-b6a5-f5b2fc46ea74" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1983.577375] env[63379]: DEBUG oslo_concurrency.lockutils [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Lock "10fc842d-b821-4103-b6a5-f5b2fc46ea74" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1983.577562] env[63379]: INFO nova.compute.manager [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Shelving [ 1983.596471] 
env[63379]: DEBUG oslo_vmware.api [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780395, 'name': RelocateVM_Task} progress is 20%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1983.880667] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4966bc8-a3c4-43d0-a106-2777a4a2727a tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Volume attach. Driver type: vmdk {{(pid=63379) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1983.880798] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4966bc8-a3c4-43d0-a106-2777a4a2727a tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369514', 'volume_id': '6a4d7edc-98d8-414f-9347-350d38aa7ea4', 'name': 'volume-6a4d7edc-98d8-414f-9347-350d38aa7ea4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'df8d513d-c201-4ffe-894e-cf8c3318cecc', 'attached_at': '', 'detached_at': '', 'volume_id': '6a4d7edc-98d8-414f-9347-350d38aa7ea4', 'serial': '6a4d7edc-98d8-414f-9347-350d38aa7ea4'} {{(pid=63379) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1983.881728] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63914ded-dde8-4b81-a1a4-e4460533b7ac {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.899478] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb345be5-2caf-4139-945a-8be81fcde378 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.926810] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4966bc8-a3c4-43d0-a106-2777a4a2727a tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] volume-6a4d7edc-98d8-414f-9347-350d38aa7ea4/volume-6a4d7edc-98d8-414f-9347-350d38aa7ea4.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1983.926992] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cfb9b83a-5c67-4b4f-a869-1ec865dfc822 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.946773] env[63379]: DEBUG oslo_vmware.api [None req-f4966bc8-a3c4-43d0-a106-2777a4a2727a tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 1983.946773] env[63379]: value = "task-1780396" [ 1983.946773] env[63379]: _type = "Task" [ 1983.946773] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1983.955856] env[63379]: DEBUG oslo_vmware.api [None req-f4966bc8-a3c4-43d0-a106-2777a4a2727a tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780396, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1983.976899] env[63379]: DEBUG oslo_concurrency.lockutils [req-36532b58-bec0-41c0-84b1-d4f6e88cd15c req-e437ed4d-34eb-4f22-883c-c75a1b1565c2 service nova] Releasing lock "refresh_cache-8b9f070e-11d3-4e2d-a0ce-54bb939a36ff" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1984.084680] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1984.084985] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-18ba5185-6c1c-4aae-ae46-f279d6d1d8fb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.096566] env[63379]: DEBUG oslo_vmware.api [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780395, 'name': RelocateVM_Task, 'duration_secs': 0.66455} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1984.098013] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Volume attach. Driver type: vmdk {{(pid=63379) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1984.098291] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369511', 'volume_id': 'eeee8c0e-4fab-40d1-86c6-51050b04b159', 'name': 'volume-eeee8c0e-4fab-40d1-86c6-51050b04b159', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '8b9f070e-11d3-4e2d-a0ce-54bb939a36ff', 'attached_at': '', 'detached_at': '', 'volume_id': 'eeee8c0e-4fab-40d1-86c6-51050b04b159', 'serial': 'eeee8c0e-4fab-40d1-86c6-51050b04b159'} {{(pid=63379) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1984.098636] env[63379]: DEBUG oslo_vmware.api [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Waiting for the task: (returnval){ [ 1984.098636] env[63379]: value = "task-1780397" [ 1984.098636] env[63379]: _type = "Task" [ 1984.098636] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1984.099449] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01f8ca07-6087-4692-881a-39271cb3612d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.112838] env[63379]: DEBUG oslo_vmware.api [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780397, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.126594] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81e81c67-2c08-4501-a26c-570ed49425ec {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.150800] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] volume-eeee8c0e-4fab-40d1-86c6-51050b04b159/volume-eeee8c0e-4fab-40d1-86c6-51050b04b159.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1984.151149] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7c0e563d-e897-467e-8878-cf7015cda71d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.172418] env[63379]: DEBUG oslo_vmware.api [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1984.172418] env[63379]: value = "task-1780398" [ 1984.172418] env[63379]: _type = "Task" [ 1984.172418] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1984.197671] env[63379]: DEBUG oslo_vmware.api [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780398, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.458250] env[63379]: DEBUG oslo_vmware.api [None req-f4966bc8-a3c4-43d0-a106-2777a4a2727a tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780396, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.551022] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.727s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1984.616903] env[63379]: DEBUG oslo_vmware.api [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780397, 'name': PowerOffVM_Task, 'duration_secs': 0.204852} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1984.617412] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1984.618492] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-790a126f-6b12-408c-852a-45dfe10405fc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.643787] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-438363e1-ee63-4a40-ae6a-1215cb021ad9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.684273] env[63379]: DEBUG oslo_vmware.api [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780398, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.957926] env[63379]: DEBUG oslo_vmware.api [None req-f4966bc8-a3c4-43d0-a106-2777a4a2727a tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780396, 'name': ReconfigVM_Task, 'duration_secs': 0.77169} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1984.958519] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4966bc8-a3c4-43d0-a106-2777a4a2727a tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Reconfigured VM instance instance-0000006c to attach disk [datastore1] volume-6a4d7edc-98d8-414f-9347-350d38aa7ea4/volume-6a4d7edc-98d8-414f-9347-350d38aa7ea4.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1984.963821] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a2eb808a-7fca-473f-874d-ed31ce46178b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.981572] env[63379]: DEBUG oslo_vmware.api [None req-f4966bc8-a3c4-43d0-a106-2777a4a2727a tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 1984.981572] env[63379]: value = "task-1780399" [ 1984.981572] env[63379]: _type = "Task" [ 1984.981572] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1984.991016] env[63379]: DEBUG oslo_vmware.api [None req-f4966bc8-a3c4-43d0-a106-2777a4a2727a tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780399, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1985.160282] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Creating Snapshot of the VM instance {{(pid=63379) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1985.160282] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-2432c683-3499-44f7-915e-401fc2cd0d03 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.172880] env[63379]: DEBUG oslo_vmware.api [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Waiting for the task: (returnval){ [ 1985.172880] env[63379]: value = "task-1780400" [ 1985.172880] env[63379]: _type = "Task" [ 1985.172880] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1985.192101] env[63379]: DEBUG oslo_vmware.api [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780400, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1985.192101] env[63379]: DEBUG oslo_vmware.api [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780398, 'name': ReconfigVM_Task, 'duration_secs': 0.551} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1985.193045] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Reconfigured VM instance instance-00000071 to attach disk [datastore1] volume-eeee8c0e-4fab-40d1-86c6-51050b04b159/volume-eeee8c0e-4fab-40d1-86c6-51050b04b159.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1985.199906] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c921eb35-52eb-4ff9-8ef6-50f260a98229 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.219389] env[63379]: DEBUG oslo_vmware.api [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1985.219389] env[63379]: value = "task-1780401" [ 1985.219389] env[63379]: _type = "Task" [ 1985.219389] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1985.229857] env[63379]: DEBUG oslo_vmware.api [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780401, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1985.493397] env[63379]: DEBUG oslo_vmware.api [None req-f4966bc8-a3c4-43d0-a106-2777a4a2727a tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780399, 'name': ReconfigVM_Task, 'duration_secs': 0.17064} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1985.493729] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4966bc8-a3c4-43d0-a106-2777a4a2727a tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369514', 'volume_id': '6a4d7edc-98d8-414f-9347-350d38aa7ea4', 'name': 'volume-6a4d7edc-98d8-414f-9347-350d38aa7ea4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'df8d513d-c201-4ffe-894e-cf8c3318cecc', 'attached_at': '', 'detached_at': '', 'volume_id': '6a4d7edc-98d8-414f-9347-350d38aa7ea4', 'serial': '6a4d7edc-98d8-414f-9347-350d38aa7ea4'} {{(pid=63379) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1985.686289] env[63379]: DEBUG oslo_vmware.api [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780400, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1985.730799] env[63379]: DEBUG oslo_vmware.api [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780401, 'name': ReconfigVM_Task, 'duration_secs': 0.146582} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1985.731574] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369511', 'volume_id': 'eeee8c0e-4fab-40d1-86c6-51050b04b159', 'name': 'volume-eeee8c0e-4fab-40d1-86c6-51050b04b159', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '8b9f070e-11d3-4e2d-a0ce-54bb939a36ff', 'attached_at': '', 'detached_at': '', 'volume_id': 'eeee8c0e-4fab-40d1-86c6-51050b04b159', 'serial': 'eeee8c0e-4fab-40d1-86c6-51050b04b159'} {{(pid=63379) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1985.732771] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cc11f8a8-100e-4399-8200-7939832c3b5f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.739805] env[63379]: DEBUG oslo_vmware.api [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1985.739805] env[63379]: value = "task-1780402" [ 1985.739805] env[63379]: _type = "Task" [ 1985.739805] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1985.751050] env[63379]: DEBUG oslo_vmware.api [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780402, 'name': Rename_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1986.089859] env[63379]: INFO nova.compute.manager [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Swapping old allocation on dict_keys(['cf478c89-515f-4372-b90f-4868ab56e978']) held by migration 586ced43-f4f3-4a5a-8699-2d9363caa025 for instance [ 1986.116036] env[63379]: DEBUG nova.scheduler.client.report [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Overwriting current allocation {'allocations': {'cf478c89-515f-4372-b90f-4868ab56e978': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 160}}, 'project_id': '8c01c5c8c3734c4ea066324e542e7374', 'user_id': 'deef4f9ae0754a6c8a7f673c10a76408', 'consumer_generation': 1} on consumer 1c983c16-6f86-4932-9698-7fb1428ca231 {{(pid=63379) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2032}} [ 1986.185047] env[63379]: DEBUG oslo_vmware.api [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780400, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1986.224253] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "refresh_cache-1c983c16-6f86-4932-9698-7fb1428ca231" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1986.224476] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquired lock "refresh_cache-1c983c16-6f86-4932-9698-7fb1428ca231" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1986.224660] env[63379]: DEBUG nova.network.neutron [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1986.251117] env[63379]: DEBUG oslo_vmware.api [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780402, 'name': Rename_Task, 'duration_secs': 0.141626} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1986.251409] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1986.251664] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-50402d4d-1b22-4fd6-b4f7-3c262154aa88 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.259796] env[63379]: DEBUG oslo_vmware.api [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1986.259796] env[63379]: value = "task-1780403" [ 1986.259796] env[63379]: _type = "Task" [ 1986.259796] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1986.268416] env[63379]: DEBUG oslo_vmware.api [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780403, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1986.511779] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "85ecb409-ab53-43d9-8120-2f8c7402d74c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1986.512175] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "85ecb409-ab53-43d9-8120-2f8c7402d74c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1986.532222] env[63379]: DEBUG nova.objects.instance [None req-f4966bc8-a3c4-43d0-a106-2777a4a2727a tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lazy-loading 'flavor' on Instance uuid df8d513d-c201-4ffe-894e-cf8c3318cecc {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1986.690156] env[63379]: DEBUG oslo_vmware.api [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780400, 'name': CreateSnapshot_Task, 'duration_secs': 1.092901} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1986.690156] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Created Snapshot of the VM instance {{(pid=63379) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1986.690658] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89483cc1-21ce-4a6c-885b-a4057814ed51 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.771211] env[63379]: DEBUG oslo_vmware.api [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780403, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1987.016234] env[63379]: DEBUG nova.compute.manager [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1987.041064] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f4966bc8-a3c4-43d0-a106-2777a4a2727a tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "df8d513d-c201-4ffe-894e-cf8c3318cecc" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.793s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1987.080851] env[63379]: DEBUG nova.network.neutron [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Updating instance_info_cache with network_info: [{"id": "62d6fce2-bf52-422e-8166-344c4fd61274", "address": "fa:16:3e:fe:3b:ec", "network": {"id": "c67e6fb1-ba3e-4494-b459-ecd555f3bf64", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1864563188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.212", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c01c5c8c3734c4ea066324e542e7374", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6934071-bf85-4591-9c7d-55c7ea131262", "external-id": "nsx-vlan-transportzone-452", "segmentation_id": 452, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62d6fce2-bf", "ovs_interfaceid": "62d6fce2-bf52-422e-8166-344c4fd61274", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1987.210703] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Creating linked-clone VM from snapshot {{(pid=63379) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1987.210988] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-b7178abb-0976-44d6-9ae1-28e4169da6a7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.220409] env[63379]: DEBUG oslo_vmware.api [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Waiting for the task: (returnval){ [ 1987.220409] env[63379]: value = "task-1780404" [ 1987.220409] env[63379]: _type = "Task" [ 1987.220409] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1987.229022] env[63379]: DEBUG oslo_vmware.api [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780404, 'name': CloneVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1987.270992] env[63379]: DEBUG oslo_vmware.api [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780403, 'name': PowerOnVM_Task, 'duration_secs': 0.713528} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1987.270992] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1987.271194] env[63379]: INFO nova.compute.manager [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Took 5.46 seconds to spawn the instance on the hypervisor. 
[ 1987.271376] env[63379]: DEBUG nova.compute.manager [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1987.272544] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78f4840c-ed08-46f4-8221-4f013a83539c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.538394] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1987.538705] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1987.540219] env[63379]: INFO nova.compute.claims [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1987.583676] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Releasing lock "refresh_cache-1c983c16-6f86-4932-9698-7fb1428ca231" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1987.584168] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1987.585129] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5217abd0-cc34-4142-8971-ce0370241ae2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.593603] env[63379]: DEBUG oslo_vmware.api [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1987.593603] env[63379]: value = "task-1780405" [ 1987.593603] env[63379]: _type = "Task" [ 1987.593603] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1987.602764] env[63379]: DEBUG oslo_vmware.api [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780405, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1987.731465] env[63379]: DEBUG oslo_vmware.api [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780404, 'name': CloneVM_Task} progress is 94%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1987.794444] env[63379]: INFO nova.compute.manager [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Took 12.81 seconds to build instance. [ 1987.975707] env[63379]: DEBUG nova.compute.manager [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Stashing vm_state: active {{(pid=63379) _prep_resize /opt/stack/nova/nova/compute/manager.py:5671}} [ 1988.104430] env[63379]: DEBUG oslo_vmware.api [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780405, 'name': PowerOffVM_Task, 'duration_secs': 0.300234} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1988.104768] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1988.105462] env[63379]: DEBUG nova.virt.hardware [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1988.105681] env[63379]: DEBUG nova.virt.hardware [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1988.105846] env[63379]: DEBUG nova.virt.hardware [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1988.106045] env[63379]: DEBUG nova.virt.hardware [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 
tempest-ServerActionsTestJSON-1145277061-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1988.106276] env[63379]: DEBUG nova.virt.hardware [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1988.106460] env[63379]: DEBUG nova.virt.hardware [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1988.106674] env[63379]: DEBUG nova.virt.hardware [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1988.106842] env[63379]: DEBUG nova.virt.hardware [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1988.107021] env[63379]: DEBUG nova.virt.hardware [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1988.107199] env[63379]: DEBUG nova.virt.hardware [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1988.107380] env[63379]: DEBUG nova.virt.hardware [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1988.112634] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8f183687-ee48-45f3-98ff-fd791554ff77 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.129387] env[63379]: DEBUG oslo_vmware.api [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1988.129387] env[63379]: value = "task-1780406" [ 1988.129387] env[63379]: _type = "Task" [ 1988.129387] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1988.137778] env[63379]: DEBUG oslo_vmware.api [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780406, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1988.233897] env[63379]: DEBUG oslo_vmware.api [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780404, 'name': CloneVM_Task} progress is 94%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1988.296670] env[63379]: DEBUG oslo_concurrency.lockutils [None req-308527f8-4e2e-4f6d-b1d0-4cfa5a44c83e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "8b9f070e-11d3-4e2d-a0ce-54bb939a36ff" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.320s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1988.497810] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1988.638638] env[63379]: DEBUG oslo_vmware.api [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780406, 'name': ReconfigVM_Task, 'duration_secs': 0.161724} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1988.639597] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28c595eb-4bba-4c16-88de-759a275f8936 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.662396] env[63379]: DEBUG nova.virt.hardware [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1988.662651] env[63379]: DEBUG nova.virt.hardware [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1988.662816] env[63379]: DEBUG nova.virt.hardware [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1988.663020] env[63379]: DEBUG nova.virt.hardware [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1988.663182] env[63379]: DEBUG nova.virt.hardware [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1988.663342] env[63379]: DEBUG nova.virt.hardware [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1988.663554] env[63379]: DEBUG nova.virt.hardware [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1988.664116] env[63379]: DEBUG nova.virt.hardware [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1988.664116] env[63379]: DEBUG nova.virt.hardware [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1988.664116] env[63379]: DEBUG nova.virt.hardware [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1988.664281] env[63379]: DEBUG nova.virt.hardware [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1988.665400] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cafaf5d6-48e4-449c-b776-56f352404243 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.672251] env[63379]: DEBUG oslo_vmware.api [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1988.672251] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d8ae6c-79b3-66f6-a286-5979c43b7bdd" [ 1988.672251] env[63379]: _type = "Task" [ 1988.672251] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1988.682726] env[63379]: DEBUG oslo_vmware.api [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d8ae6c-79b3-66f6-a286-5979c43b7bdd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1988.703434] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9a05d05-2311-4c14-a1b2-3493b1d8068d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.711944] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f01bc3a-228a-430f-b512-56c3fe7d409b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.745605] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfae823d-ff85-491b-9f08-7a7e854652c3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.753536] env[63379]: DEBUG oslo_vmware.api [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780404, 'name': CloneVM_Task, 'duration_secs': 1.484322} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1988.755636] env[63379]: INFO nova.virt.vmwareapi.vmops [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Created linked-clone VM from snapshot [ 1988.756434] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-533c6eee-a38d-4f9d-b69f-533ce5fcf203 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.759757] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ea5ca0b-85c5-4eda-b382-6f437cfbb62b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.776541] env[63379]: DEBUG nova.compute.provider_tree [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1988.777938] env[63379]: DEBUG nova.virt.vmwareapi.images [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Uploading image 3959d3db-eff7-402f-81f5-8f67a00a1f20 {{(pid=63379) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1988.805014] env[63379]: DEBUG oslo_vmware.rw_handles [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1988.805014] env[63379]: value = "vm-369517" [ 1988.805014] env[63379]: _type = "VirtualMachine" [ 1988.805014] env[63379]: }. {{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1988.805300] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-13a70ee5-e065-44c5-9daa-32f2ef70f15f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.813507] env[63379]: DEBUG oslo_vmware.rw_handles [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Lease: (returnval){ [ 1988.813507] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52295f76-7238-4437-77d7-d8e1786a5f1d" [ 1988.813507] env[63379]: _type = "HttpNfcLease" [ 1988.813507] env[63379]: } obtained for exporting VM: (result){ [ 1988.813507] env[63379]: value = "vm-369517" [ 1988.813507] env[63379]: _type = "VirtualMachine" [ 1988.813507] env[63379]: }. 
{{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1988.813787] env[63379]: DEBUG oslo_vmware.api [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Waiting for the lease: (returnval){ [ 1988.813787] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52295f76-7238-4437-77d7-d8e1786a5f1d" [ 1988.813787] env[63379]: _type = "HttpNfcLease" [ 1988.813787] env[63379]: } to be ready. {{(pid=63379) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1988.821868] env[63379]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1988.821868] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52295f76-7238-4437-77d7-d8e1786a5f1d" [ 1988.821868] env[63379]: _type = "HttpNfcLease" [ 1988.821868] env[63379]: } is initializing. {{(pid=63379) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1989.008986] env[63379]: DEBUG nova.compute.manager [req-3de21084-e0d0-4fd8-8ce5-3e6cc82ad4ea req-d64100b8-1ec6-4c3f-89d4-819ee237453b service nova] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Received event network-changed-6cdabd2b-f665-46a9-a86e-2527cfe452bf {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1989.009858] env[63379]: DEBUG nova.compute.manager [req-3de21084-e0d0-4fd8-8ce5-3e6cc82ad4ea req-d64100b8-1ec6-4c3f-89d4-819ee237453b service nova] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Refreshing instance network info cache due to event network-changed-6cdabd2b-f665-46a9-a86e-2527cfe452bf. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1989.009858] env[63379]: DEBUG oslo_concurrency.lockutils [req-3de21084-e0d0-4fd8-8ce5-3e6cc82ad4ea req-d64100b8-1ec6-4c3f-89d4-819ee237453b service nova] Acquiring lock "refresh_cache-510db409-0b4c-494a-8084-39ef3cd6c918" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1989.009858] env[63379]: DEBUG oslo_concurrency.lockutils [req-3de21084-e0d0-4fd8-8ce5-3e6cc82ad4ea req-d64100b8-1ec6-4c3f-89d4-819ee237453b service nova] Acquired lock "refresh_cache-510db409-0b4c-494a-8084-39ef3cd6c918" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1989.009858] env[63379]: DEBUG nova.network.neutron [req-3de21084-e0d0-4fd8-8ce5-3e6cc82ad4ea req-d64100b8-1ec6-4c3f-89d4-819ee237453b service nova] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Refreshing network info cache for port 6cdabd2b-f665-46a9-a86e-2527cfe452bf {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1989.184254] env[63379]: DEBUG oslo_vmware.api [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d8ae6c-79b3-66f6-a286-5979c43b7bdd, 'name': SearchDatastore_Task, 'duration_secs': 0.009202} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1989.190020] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Reconfiguring VM instance instance-0000006b to detach disk 2000 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1989.190388] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3efde24b-ec16-417d-81a6-360e5ea4d79b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.209796] env[63379]: DEBUG oslo_vmware.api [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1989.209796] env[63379]: value = "task-1780408" [ 1989.209796] env[63379]: _type = "Task" [ 1989.209796] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1989.217813] env[63379]: DEBUG oslo_vmware.api [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780408, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1989.282202] env[63379]: DEBUG nova.scheduler.client.report [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1989.322623] env[63379]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1989.322623] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52295f76-7238-4437-77d7-d8e1786a5f1d" [ 1989.322623] env[63379]: _type = "HttpNfcLease" [ 1989.322623] env[63379]: } is ready. {{(pid=63379) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1989.322945] env[63379]: DEBUG oslo_vmware.rw_handles [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1989.322945] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52295f76-7238-4437-77d7-d8e1786a5f1d" [ 1989.322945] env[63379]: _type = "HttpNfcLease" [ 1989.322945] env[63379]: }. 
{{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1989.323724] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f79f3f33-64b1-4523-8504-9bb2835a4fc4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.332084] env[63379]: DEBUG oslo_vmware.rw_handles [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e7dbe3-7148-de6b-05db-f1e21351528a/disk-0.vmdk from lease info. {{(pid=63379) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1989.332283] env[63379]: DEBUG oslo_vmware.rw_handles [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e7dbe3-7148-de6b-05db-f1e21351528a/disk-0.vmdk for reading. {{(pid=63379) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1989.423891] env[63379]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f67db244-d701-4206-9755-21d9c0682968 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.720845] env[63379]: DEBUG oslo_vmware.api [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780408, 'name': ReconfigVM_Task, 'duration_secs': 0.217978} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1989.721244] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Reconfigured VM instance instance-0000006b to detach disk 2000 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1989.722186] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97c43bf9-deac-4bdd-bf56-b171bc7cf7ad {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.746970] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] 1c983c16-6f86-4932-9698-7fb1428ca231/1c983c16-6f86-4932-9698-7fb1428ca231.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1989.748231] env[63379]: DEBUG nova.network.neutron [req-3de21084-e0d0-4fd8-8ce5-3e6cc82ad4ea req-d64100b8-1ec6-4c3f-89d4-819ee237453b service nova] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Updated VIF entry in instance network info cache for port 6cdabd2b-f665-46a9-a86e-2527cfe452bf. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1989.748728] env[63379]: DEBUG nova.network.neutron [req-3de21084-e0d0-4fd8-8ce5-3e6cc82ad4ea req-d64100b8-1ec6-4c3f-89d4-819ee237453b service nova] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Updating instance_info_cache with network_info: [{"id": "6cdabd2b-f665-46a9-a86e-2527cfe452bf", "address": "fa:16:3e:bc:a5:55", "network": {"id": "867cf8d8-4bba-4306-ad6d-632c9dc6863d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-777715300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a3363a90de2d4d5988ddd03974c10d0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "459b8c74-0aa6-42b6-996a-42b1c5d7e5c6", "external-id": "nsx-vlan-transportzone-467", "segmentation_id": 467, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6cdabd2b-f6", "ovs_interfaceid": "6cdabd2b-f665-46a9-a86e-2527cfe452bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1989.750295] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-65a56f35-3e80-4337-931c-b75d3aeb159c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.773900] env[63379]: DEBUG oslo_vmware.api [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1989.773900] env[63379]: value = "task-1780409" [ 1989.773900] env[63379]: _type = "Task" [ 1989.773900] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1989.784763] env[63379]: DEBUG oslo_vmware.api [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780409, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1989.787780] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.249s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1989.788470] env[63379]: DEBUG nova.compute.manager [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1989.791393] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 1.294s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1990.265715] env[63379]: DEBUG oslo_concurrency.lockutils [req-3de21084-e0d0-4fd8-8ce5-3e6cc82ad4ea req-d64100b8-1ec6-4c3f-89d4-819ee237453b service nova] Releasing lock "refresh_cache-510db409-0b4c-494a-8084-39ef3cd6c918" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1990.284578] env[63379]: DEBUG oslo_vmware.api [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780409, 'name': ReconfigVM_Task, 'duration_secs': 0.450607} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1990.285081] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Reconfigured VM instance instance-0000006b to attach disk [datastore1] 1c983c16-6f86-4932-9698-7fb1428ca231/1c983c16-6f86-4932-9698-7fb1428ca231.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1990.286175] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b3dac07-6395-419a-90a8-a8f1f498db83 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.314031] env[63379]: DEBUG nova.compute.utils [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1990.318704] env[63379]: INFO nova.compute.claims [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1990.323724] env[63379]: DEBUG nova.compute.manager [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1990.326419] env[63379]: DEBUG nova.network.neutron [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1990.335111] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0512eb56-38b9-4d7f-b7d7-1d89602757ae {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.356252] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9e1a25f-c0fa-4b65-9f3b-d6dff3d54651 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.377476] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2618da24-0cbc-4081-bccb-bd639847df09 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.382048] env[63379]: DEBUG nova.policy [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f2e7c2125f0044508dc4016c4de224e2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9746ae945355479fa5880802e08d2b0a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 1990.390335] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1990.390814] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0004d7f4-49ce-412f-af88-4d8cfff10b66 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.398624] env[63379]: DEBUG oslo_vmware.api [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1990.398624] env[63379]: value = "task-1780410" [ 1990.398624] env[63379]: _type = "Task" [ 1990.398624] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1990.407891] env[63379]: DEBUG oslo_vmware.api [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780410, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1990.430892] env[63379]: DEBUG nova.compute.manager [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Stashing vm_state: active {{(pid=63379) _prep_resize /opt/stack/nova/nova/compute/manager.py:5671}} [ 1990.698648] env[63379]: DEBUG nova.network.neutron [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Successfully created port: ef74ed84-a494-4ce8-a037-458fd0285f2b {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1990.825255] env[63379]: INFO nova.compute.resource_tracker [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Updating resource usage from migration 303b138d-23ef-452b-9f59-f1d338dae5c7 [ 1990.829306] env[63379]: DEBUG nova.compute.manager [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1990.913177] env[63379]: DEBUG oslo_vmware.api [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780410, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1990.952164] env[63379]: DEBUG oslo_concurrency.lockutils [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1990.985589] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bae9ea0d-e490-462b-9d2c-23034acf2b9b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.994864] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5430c8d5-c5b7-495a-b603-9f69af28a8e8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.030345] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d38966e-f922-456b-b8be-10ef79c124da {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.036451] env[63379]: DEBUG nova.compute.manager [req-1445fb25-5125-491b-bbd2-3d5c7e7b4b13 req-c8ee902a-4ae5-4871-bc96-9aca884754d2 service nova] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Received event network-changed-05160396-15ed-49fa-b2de-3793f1f45863 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1991.036963] env[63379]: DEBUG nova.compute.manager [req-1445fb25-5125-491b-bbd2-3d5c7e7b4b13 req-c8ee902a-4ae5-4871-bc96-9aca884754d2 service nova] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Refreshing instance network info cache due to event network-changed-05160396-15ed-49fa-b2de-3793f1f45863. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1991.037402] env[63379]: DEBUG oslo_concurrency.lockutils [req-1445fb25-5125-491b-bbd2-3d5c7e7b4b13 req-c8ee902a-4ae5-4871-bc96-9aca884754d2 service nova] Acquiring lock "refresh_cache-8b9f070e-11d3-4e2d-a0ce-54bb939a36ff" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1991.037762] env[63379]: DEBUG oslo_concurrency.lockutils [req-1445fb25-5125-491b-bbd2-3d5c7e7b4b13 req-c8ee902a-4ae5-4871-bc96-9aca884754d2 service nova] Acquired lock "refresh_cache-8b9f070e-11d3-4e2d-a0ce-54bb939a36ff" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1991.038110] env[63379]: DEBUG nova.network.neutron [req-1445fb25-5125-491b-bbd2-3d5c7e7b4b13 req-c8ee902a-4ae5-4871-bc96-9aca884754d2 service nova] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Refreshing network info cache for port 05160396-15ed-49fa-b2de-3793f1f45863 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1991.046137] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b8688ec-c2b3-4c0c-97dd-0d859cb42453 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.062902] env[63379]: DEBUG nova.compute.provider_tree [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1991.413546] env[63379]: DEBUG oslo_vmware.api [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780410, 'name': PowerOnVM_Task, 'duration_secs': 0.832791} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1991.414942] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1991.585372] env[63379]: ERROR nova.scheduler.client.report [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [req-5cc5600d-d7bb-4f84-838c-4ddfdcdba70e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID cf478c89-515f-4372-b90f-4868ab56e978. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-5cc5600d-d7bb-4f84-838c-4ddfdcdba70e"}]} [ 1991.604961] env[63379]: DEBUG nova.scheduler.client.report [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Refreshing inventories for resource provider cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1991.619426] env[63379]: DEBUG nova.scheduler.client.report [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Updating ProviderTree inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1991.619769] env[63379]: DEBUG nova.compute.provider_tree [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1991.632606] env[63379]: DEBUG nova.scheduler.client.report [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 
tempest-ServerActionsTestOtherB-1503948534-project-member] Refreshing aggregate associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, aggregates: None {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1991.651780] env[63379]: DEBUG nova.scheduler.client.report [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Refreshing trait associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1991.794130] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47ea56a2-8713-4de6-a868-24cefd214e9a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.804421] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88861473-6ae7-4d13-8cb2-713583793c03 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.839197] env[63379]: DEBUG nova.network.neutron [req-1445fb25-5125-491b-bbd2-3d5c7e7b4b13 req-c8ee902a-4ae5-4871-bc96-9aca884754d2 service nova] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Updated VIF entry in instance network info cache for port 05160396-15ed-49fa-b2de-3793f1f45863. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1991.839602] env[63379]: DEBUG nova.network.neutron [req-1445fb25-5125-491b-bbd2-3d5c7e7b4b13 req-c8ee902a-4ae5-4871-bc96-9aca884754d2 service nova] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Updating instance_info_cache with network_info: [{"id": "05160396-15ed-49fa-b2de-3793f1f45863", "address": "fa:16:3e:b8:60:62", "network": {"id": "867cf8d8-4bba-4306-ad6d-632c9dc6863d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-777715300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.247", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a3363a90de2d4d5988ddd03974c10d0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "459b8c74-0aa6-42b6-996a-42b1c5d7e5c6", "external-id": "nsx-vlan-transportzone-467", "segmentation_id": 467, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05160396-15", "ovs_interfaceid": "05160396-15ed-49fa-b2de-3793f1f45863", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1991.841484] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae3f9a83-dbf7-45e5-87fa-f1be363df147 {{(pid=63379) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.845144] env[63379]: DEBUG nova.compute.manager [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1991.854471] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaae74ee-e354-4cd9-b696-8a09bb44573c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.871432] env[63379]: DEBUG nova.compute.provider_tree [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1991.880949] env[63379]: DEBUG nova.virt.hardware [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1991.881207] env[63379]: DEBUG nova.virt.hardware [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1991.881375] env[63379]: DEBUG nova.virt.hardware [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1991.881574] env[63379]: DEBUG nova.virt.hardware [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1991.881714] env[63379]: DEBUG nova.virt.hardware [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 
tempest-ServersTestJSON-1933653091-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1991.881865] env[63379]: DEBUG nova.virt.hardware [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1991.882107] env[63379]: DEBUG nova.virt.hardware [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1991.882281] env[63379]: DEBUG nova.virt.hardware [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1991.882455] env[63379]: DEBUG nova.virt.hardware [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1991.882654] env[63379]: DEBUG nova.virt.hardware [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1991.882812] env[63379]: DEBUG nova.virt.hardware [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1991.883687] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-984da9ec-aa14-4e9e-b7d8-e6ff959138e7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.893796] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc0883f4-f3f3-4ed2-9989-4a7e0a0896be {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.267191] env[63379]: DEBUG nova.network.neutron [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Successfully updated port: ef74ed84-a494-4ce8-a037-458fd0285f2b {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1992.348172] env[63379]: DEBUG oslo_concurrency.lockutils [req-1445fb25-5125-491b-bbd2-3d5c7e7b4b13 req-c8ee902a-4ae5-4871-bc96-9aca884754d2 service nova] Releasing lock "refresh_cache-8b9f070e-11d3-4e2d-a0ce-54bb939a36ff" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1992.409092] env[63379]: DEBUG nova.scheduler.client.report [None 
req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Updated inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 with generation 165 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1992.409437] env[63379]: DEBUG nova.compute.provider_tree [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Updating resource provider cf478c89-515f-4372-b90f-4868ab56e978 generation from 165 to 166 during operation: update_inventory {{(pid=63379) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1992.409665] env[63379]: DEBUG nova.compute.provider_tree [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1992.424516] env[63379]: INFO nova.compute.manager [None req-ec8c9d67-d25d-4043-9303-0d158a71d020 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Updating instance to original state: 'active' [ 1992.769613] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "refresh_cache-85ecb409-ab53-43d9-8120-2f8c7402d74c" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1992.769794] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquired lock "refresh_cache-85ecb409-ab53-43d9-8120-2f8c7402d74c" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1992.769963] env[63379]: DEBUG nova.network.neutron [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1992.915258] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 3.124s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1992.915644] env[63379]: INFO nova.compute.manager [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Migrating [ 1992.922638] env[63379]: DEBUG oslo_concurrency.lockutils [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 1.971s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1993.061845] env[63379]: DEBUG nova.compute.manager [req-cedf6790-67b4-44a8-a0cd-2bf118340dae req-70e83648-2f03-432f-8d62-a8e402214da0 service nova] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Received event network-vif-plugged-ef74ed84-a494-4ce8-a037-458fd0285f2b {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1993.062166] env[63379]: DEBUG oslo_concurrency.lockutils [req-cedf6790-67b4-44a8-a0cd-2bf118340dae req-70e83648-2f03-432f-8d62-a8e402214da0 service nova] Acquiring lock "85ecb409-ab53-43d9-8120-2f8c7402d74c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1993.062425] env[63379]: DEBUG oslo_concurrency.lockutils [req-cedf6790-67b4-44a8-a0cd-2bf118340dae req-70e83648-2f03-432f-8d62-a8e402214da0 service nova] Lock "85ecb409-ab53-43d9-8120-2f8c7402d74c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1993.062643] env[63379]: DEBUG oslo_concurrency.lockutils [req-cedf6790-67b4-44a8-a0cd-2bf118340dae req-70e83648-2f03-432f-8d62-a8e402214da0 service nova] Lock "85ecb409-ab53-43d9-8120-2f8c7402d74c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1993.062859] env[63379]: DEBUG nova.compute.manager [req-cedf6790-67b4-44a8-a0cd-2bf118340dae req-70e83648-2f03-432f-8d62-a8e402214da0 service nova] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] No waiting events found dispatching network-vif-plugged-ef74ed84-a494-4ce8-a037-458fd0285f2b {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1993.063084] env[63379]: WARNING nova.compute.manager [req-cedf6790-67b4-44a8-a0cd-2bf118340dae req-70e83648-2f03-432f-8d62-a8e402214da0 service nova] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Received unexpected event network-vif-plugged-ef74ed84-a494-4ce8-a037-458fd0285f2b for instance with vm_state building and task_state spawning. 
[ 1993.063274] env[63379]: DEBUG nova.compute.manager [req-cedf6790-67b4-44a8-a0cd-2bf118340dae req-70e83648-2f03-432f-8d62-a8e402214da0 service nova] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Received event network-changed-ef74ed84-a494-4ce8-a037-458fd0285f2b {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1993.063475] env[63379]: DEBUG nova.compute.manager [req-cedf6790-67b4-44a8-a0cd-2bf118340dae req-70e83648-2f03-432f-8d62-a8e402214da0 service nova] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Refreshing instance network info cache due to event network-changed-ef74ed84-a494-4ce8-a037-458fd0285f2b. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 1993.063683] env[63379]: DEBUG oslo_concurrency.lockutils [req-cedf6790-67b4-44a8-a0cd-2bf118340dae req-70e83648-2f03-432f-8d62-a8e402214da0 service nova] Acquiring lock "refresh_cache-85ecb409-ab53-43d9-8120-2f8c7402d74c" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1993.307438] env[63379]: DEBUG nova.network.neutron [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1993.433892] env[63379]: INFO nova.compute.claims [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1993.439164] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "refresh_cache-df8d513d-c201-4ffe-894e-cf8c3318cecc" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1993.439475] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquired lock "refresh_cache-df8d513d-c201-4ffe-894e-cf8c3318cecc" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1993.439669] env[63379]: DEBUG nova.network.neutron [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1993.494775] env[63379]: DEBUG nova.network.neutron [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Updating instance_info_cache with network_info: [{"id": "ef74ed84-a494-4ce8-a037-458fd0285f2b", "address": "fa:16:3e:91:bd:3a", "network": {"id": "13b14fc1-6384-47ab-b623-f48d1ef0c41e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1646386679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9746ae945355479fa5880802e08d2b0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16c6ea68-9b0e-4ac0-a484-7a9a40533017", "external-id": "nsx-vlan-transportzone-384", "segmentation_id": 384, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef74ed84-a4", "ovs_interfaceid": "ef74ed84-a494-4ce8-a037-458fd0285f2b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1993.944189] env[63379]: INFO nova.compute.resource_tracker [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Updating resource usage from migration 4814e176-a955-41b5-bf25-9bfbcbc945cb [ 1993.959607] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2fdd3d94-034d-4763-a3d8-2ac3f517e416 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "1c983c16-6f86-4932-9698-7fb1428ca231" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1993.959914] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2fdd3d94-034d-4763-a3d8-2ac3f517e416 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "1c983c16-6f86-4932-9698-7fb1428ca231" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1993.960150] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2fdd3d94-034d-4763-a3d8-2ac3f517e416 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "1c983c16-6f86-4932-9698-7fb1428ca231-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1993.960406] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2fdd3d94-034d-4763-a3d8-2ac3f517e416 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "1c983c16-6f86-4932-9698-7fb1428ca231-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1993.960613] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2fdd3d94-034d-4763-a3d8-2ac3f517e416 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "1c983c16-6f86-4932-9698-7fb1428ca231-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1993.963677] 
env[63379]: INFO nova.compute.manager [None req-2fdd3d94-034d-4763-a3d8-2ac3f517e416 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Terminating instance [ 1993.970028] env[63379]: DEBUG nova.compute.manager [None req-2fdd3d94-034d-4763-a3d8-2ac3f517e416 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1993.970028] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-2fdd3d94-034d-4763-a3d8-2ac3f517e416 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1993.970499] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbc2f2ef-5300-4dde-b3a2-0ff4b3d97455 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.979885] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fdd3d94-034d-4763-a3d8-2ac3f517e416 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1993.982162] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d553b32c-d2e5-4a63-b834-ea4d35137abf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.990744] env[63379]: DEBUG oslo_vmware.api [None req-2fdd3d94-034d-4763-a3d8-2ac3f517e416 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1993.990744] env[63379]: value = "task-1780411" [ 1993.990744] env[63379]: _type = "Task" [ 1993.990744] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1994.001060] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Releasing lock "refresh_cache-85ecb409-ab53-43d9-8120-2f8c7402d74c" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1994.001558] env[63379]: DEBUG nova.compute.manager [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Instance network_info: |[{"id": "ef74ed84-a494-4ce8-a037-458fd0285f2b", "address": "fa:16:3e:91:bd:3a", "network": {"id": "13b14fc1-6384-47ab-b623-f48d1ef0c41e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1646386679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9746ae945355479fa5880802e08d2b0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16c6ea68-9b0e-4ac0-a484-7a9a40533017", "external-id": "nsx-vlan-transportzone-384", "segmentation_id": 384, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef74ed84-a4", "ovs_interfaceid": "ef74ed84-a494-4ce8-a037-458fd0285f2b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1994.001796] env[63379]: DEBUG oslo_vmware.api [None req-2fdd3d94-034d-4763-a3d8-2ac3f517e416 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780411, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1994.004980] env[63379]: DEBUG oslo_concurrency.lockutils [req-cedf6790-67b4-44a8-a0cd-2bf118340dae req-70e83648-2f03-432f-8d62-a8e402214da0 service nova] Acquired lock "refresh_cache-85ecb409-ab53-43d9-8120-2f8c7402d74c" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1994.005248] env[63379]: DEBUG nova.network.neutron [req-cedf6790-67b4-44a8-a0cd-2bf118340dae req-70e83648-2f03-432f-8d62-a8e402214da0 service nova] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Refreshing network info cache for port ef74ed84-a494-4ce8-a037-458fd0285f2b {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1994.006726] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:91:bd:3a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '16c6ea68-9b0e-4ac0-a484-7a9a40533017', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ef74ed84-a494-4ce8-a037-458fd0285f2b', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1994.015544] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Creating folder: Project (9746ae945355479fa5880802e08d2b0a). Parent ref: group-v369214. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1994.019292] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-73b489cf-02c7-480b-9f45-45e2b93e98c5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.037705] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Created folder: Project (9746ae945355479fa5880802e08d2b0a) in parent group-v369214. [ 1994.037909] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Creating folder: Instances. Parent ref: group-v369518. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1994.038235] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-48e48138-f7a5-4d9c-9e28-1928143b5282 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.054266] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Created folder: Instances in parent group-v369518. [ 1994.054266] env[63379]: DEBUG oslo.service.loopingcall [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1994.054266] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1994.054266] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9cf99047-5474-4b76-ab9f-7a5d6e95c57e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.086478] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1994.086478] env[63379]: value = "task-1780414" [ 1994.086478] env[63379]: _type = "Task" [ 1994.086478] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1994.098858] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780414, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1994.150048] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-923254a0-c643-4914-b4a6-29aaaa7cd1e8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.161367] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b6492fb-4d6a-489d-904f-7dcfe6c4cb81 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.213377] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bd42b12-d595-45ca-a281-16590105ef08 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.227144] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5cda5f4-026c-473d-8a52-67df1132c5fa {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.250583] env[63379]: DEBUG nova.compute.provider_tree [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1994.254081] env[63379]: DEBUG nova.network.neutron [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Updating instance_info_cache with network_info: [{"id": "43a4d9a7-51c7-4dbd-8864-2e6fbcb7c13e", "address": "fa:16:3e:2d:9c:d3", "network": {"id": "0dd98be0-5b25-4e45-ac38-4b8d3cd9fc6c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-191573180-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, 
"tenant_id": "645f0e0a5e1a44d59ca9c85da49bb454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43a4d9a7-51", "ovs_interfaceid": "43a4d9a7-51c7-4dbd-8864-2e6fbcb7c13e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1994.297544] env[63379]: DEBUG nova.network.neutron [req-cedf6790-67b4-44a8-a0cd-2bf118340dae req-70e83648-2f03-432f-8d62-a8e402214da0 service nova] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Updated VIF entry in instance network info cache for port ef74ed84-a494-4ce8-a037-458fd0285f2b. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1994.297958] env[63379]: DEBUG nova.network.neutron [req-cedf6790-67b4-44a8-a0cd-2bf118340dae req-70e83648-2f03-432f-8d62-a8e402214da0 service nova] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Updating instance_info_cache with network_info: [{"id": "ef74ed84-a494-4ce8-a037-458fd0285f2b", "address": "fa:16:3e:91:bd:3a", "network": {"id": "13b14fc1-6384-47ab-b623-f48d1ef0c41e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1646386679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9746ae945355479fa5880802e08d2b0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16c6ea68-9b0e-4ac0-a484-7a9a40533017", "external-id": "nsx-vlan-transportzone-384", "segmentation_id": 384, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef74ed84-a4", "ovs_interfaceid": "ef74ed84-a494-4ce8-a037-458fd0285f2b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1994.501487] env[63379]: DEBUG oslo_vmware.api [None req-2fdd3d94-034d-4763-a3d8-2ac3f517e416 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780411, 'name': PowerOffVM_Task, 'duration_secs': 0.300153} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1994.501724] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fdd3d94-034d-4763-a3d8-2ac3f517e416 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1994.501900] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-2fdd3d94-034d-4763-a3d8-2ac3f517e416 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1994.502195] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0d711e1e-bbb4-401f-a223-d962bac0cadb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.597726] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780414, 'name': CreateVM_Task, 'duration_secs': 0.428874} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1994.597913] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1994.598851] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1994.599089] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1994.599626] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1994.599982] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10fa9bc4-e3c0-4232-b636-76507929e397 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.605418] env[63379]: DEBUG oslo_vmware.api [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 1994.605418] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e0dfb2-2b45-c538-3140-ab3992eeab73" [ 1994.605418] env[63379]: _type = "Task" [ 1994.605418] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1994.616311] env[63379]: DEBUG oslo_vmware.api [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e0dfb2-2b45-c538-3140-ab3992eeab73, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1994.760382] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Releasing lock "refresh_cache-df8d513d-c201-4ffe-894e-cf8c3318cecc" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1994.762493] env[63379]: DEBUG nova.scheduler.client.report [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1994.801473] env[63379]: DEBUG oslo_concurrency.lockutils [req-cedf6790-67b4-44a8-a0cd-2bf118340dae req-70e83648-2f03-432f-8d62-a8e402214da0 service nova] Releasing lock "refresh_cache-85ecb409-ab53-43d9-8120-2f8c7402d74c" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1994.974041] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-2fdd3d94-034d-4763-a3d8-2ac3f517e416 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1994.974402] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-2fdd3d94-034d-4763-a3d8-2ac3f517e416 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1994.974473] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fdd3d94-034d-4763-a3d8-2ac3f517e416 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Deleting the datastore file [datastore1] 1c983c16-6f86-4932-9698-7fb1428ca231 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1994.975016] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9f63f1e8-32ea-4037-b793-4897bb4702f7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.982951] env[63379]: DEBUG oslo_vmware.api [None req-2fdd3d94-034d-4763-a3d8-2ac3f517e416 tempest-ServerActionsTestJSON-1145277061 
tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 1994.982951] env[63379]: value = "task-1780416" [ 1994.982951] env[63379]: _type = "Task" [ 1994.982951] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1994.992361] env[63379]: DEBUG oslo_vmware.api [None req-2fdd3d94-034d-4763-a3d8-2ac3f517e416 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780416, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1995.117645] env[63379]: DEBUG oslo_vmware.api [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e0dfb2-2b45-c538-3140-ab3992eeab73, 'name': SearchDatastore_Task, 'duration_secs': 0.015242} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1995.117988] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1995.118310] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1995.118568] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1995.118731] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1995.118916] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1995.119234] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-623de251-6c05-49c0-8e74-e85d1ab2318a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.129817] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 
tempest-ServersTestJSON-1933653091-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1995.130033] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1995.130799] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-562a6d41-6687-44ad-afc3-01f180a90dfe {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.136902] env[63379]: DEBUG oslo_vmware.api [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 1995.136902] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c0064f-c779-72da-8451-3381c7b6533d" [ 1995.136902] env[63379]: _type = "Task" [ 1995.136902] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1995.145485] env[63379]: DEBUG oslo_vmware.api [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c0064f-c779-72da-8451-3381c7b6533d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1995.272374] env[63379]: DEBUG oslo_concurrency.lockutils [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.350s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1995.272653] env[63379]: INFO nova.compute.manager [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Migrating [ 1995.494988] env[63379]: DEBUG oslo_vmware.api [None req-2fdd3d94-034d-4763-a3d8-2ac3f517e416 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780416, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.291134} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1995.495427] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fdd3d94-034d-4763-a3d8-2ac3f517e416 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1995.495701] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-2fdd3d94-034d-4763-a3d8-2ac3f517e416 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1995.495972] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-2fdd3d94-034d-4763-a3d8-2ac3f517e416 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1995.496226] env[63379]: INFO nova.compute.manager [None req-2fdd3d94-034d-4763-a3d8-2ac3f517e416 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Took 1.53 seconds to destroy the instance on the hypervisor. [ 1995.496539] env[63379]: DEBUG oslo.service.loopingcall [None req-2fdd3d94-034d-4763-a3d8-2ac3f517e416 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1995.496792] env[63379]: DEBUG nova.compute.manager [-] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1995.496911] env[63379]: DEBUG nova.network.neutron [-] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1995.648954] env[63379]: DEBUG oslo_vmware.api [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c0064f-c779-72da-8451-3381c7b6533d, 'name': SearchDatastore_Task, 'duration_secs': 0.01363} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1995.649914] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72873ab0-c670-4325-a545-046ef18d9ef3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.656242] env[63379]: DEBUG oslo_vmware.api [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 1995.656242] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52be19b1-abff-5c3b-e706-2382d9665063" [ 1995.656242] env[63379]: _type = "Task" [ 1995.656242] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1995.667069] env[63379]: DEBUG oslo_vmware.api [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52be19b1-abff-5c3b-e706-2382d9665063, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1995.787284] env[63379]: DEBUG oslo_concurrency.lockutils [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "refresh_cache-8b9f070e-11d3-4e2d-a0ce-54bb939a36ff" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1995.787569] env[63379]: DEBUG oslo_concurrency.lockutils [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquired lock "refresh_cache-8b9f070e-11d3-4e2d-a0ce-54bb939a36ff" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1995.787776] env[63379]: DEBUG nova.network.neutron [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1996.045907] env[63379]: DEBUG nova.compute.manager [req-be9c260d-3bab-436a-9a8b-e92da7372cb3 req-45b1d120-2bfc-4eb6-9b5c-29ac885397aa service nova] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Received event network-vif-deleted-62d6fce2-bf52-422e-8166-344c4fd61274 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 1996.046200] env[63379]: INFO nova.compute.manager [req-be9c260d-3bab-436a-9a8b-e92da7372cb3 req-45b1d120-2bfc-4eb6-9b5c-29ac885397aa service nova] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Neutron deleted interface 62d6fce2-bf52-422e-8166-344c4fd61274; detaching it from the instance and deleting it from the info cache [ 1996.046376] env[63379]: DEBUG nova.network.neutron [req-be9c260d-3bab-436a-9a8b-e92da7372cb3 req-45b1d120-2bfc-4eb6-9b5c-29ac885397aa service nova] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1996.170813] env[63379]: DEBUG oslo_vmware.api [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52be19b1-abff-5c3b-e706-2382d9665063, 'name': SearchDatastore_Task, 'duration_secs': 0.011022} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1996.171148] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1996.171436] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 85ecb409-ab53-43d9-8120-2f8c7402d74c/85ecb409-ab53-43d9-8120-2f8c7402d74c.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1996.171702] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-02f7d0e8-276e-4a57-b89f-30ec8ae18125 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.181222] env[63379]: DEBUG oslo_vmware.api [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 1996.181222] env[63379]: value = "task-1780417" [ 1996.181222] env[63379]: _type = "Task" [ 1996.181222] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1996.191596] env[63379]: DEBUG oslo_vmware.api [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780417, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1996.288551] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10578faa-e8e5-4969-b59a-cf906964e0df {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.320472] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Updating instance 'df8d513d-c201-4ffe-894e-cf8c3318cecc' progress to 0 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1996.520533] env[63379]: DEBUG nova.network.neutron [-] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1996.537747] env[63379]: DEBUG nova.network.neutron [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Updating instance_info_cache with network_info: [{"id": "05160396-15ed-49fa-b2de-3793f1f45863", "address": "fa:16:3e:b8:60:62", "network": {"id": "867cf8d8-4bba-4306-ad6d-632c9dc6863d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-777715300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.247", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a3363a90de2d4d5988ddd03974c10d0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "459b8c74-0aa6-42b6-996a-42b1c5d7e5c6", "external-id": "nsx-vlan-transportzone-467", "segmentation_id": 467, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05160396-15", "ovs_interfaceid": "05160396-15ed-49fa-b2de-3793f1f45863", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1996.548879] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0ae894b0-bb62-4463-b7fa-3eb3ec647aa0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.560794] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c65031-f383-44b1-a670-4d12bdb07ab5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.595040] env[63379]: DEBUG nova.compute.manager [req-be9c260d-3bab-436a-9a8b-e92da7372cb3 req-45b1d120-2bfc-4eb6-9b5c-29ac885397aa service nova] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Detach interface failed, port_id=62d6fce2-bf52-422e-8166-344c4fd61274, reason: Instance 1c983c16-6f86-4932-9698-7fb1428ca231 
could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 1996.694026] env[63379]: DEBUG oslo_vmware.api [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780417, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1996.827980] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1996.829105] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5bd54fdc-fed8-459e-8ae3-8706a6ecbb46 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.839619] env[63379]: DEBUG oslo_vmware.api [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 1996.839619] env[63379]: value = "task-1780418" [ 1996.839619] env[63379]: _type = "Task" [ 1996.839619] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1996.851974] env[63379]: DEBUG oslo_vmware.api [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780418, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1996.923365] env[63379]: DEBUG oslo_vmware.rw_handles [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e7dbe3-7148-de6b-05db-f1e21351528a/disk-0.vmdk. {{(pid=63379) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1996.924563] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b7bc234-777e-43b6-9e7d-f9e229a2ad81 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.932576] env[63379]: DEBUG oslo_vmware.rw_handles [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e7dbe3-7148-de6b-05db-f1e21351528a/disk-0.vmdk is in state: ready. {{(pid=63379) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1996.932778] env[63379]: ERROR oslo_vmware.rw_handles [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e7dbe3-7148-de6b-05db-f1e21351528a/disk-0.vmdk due to incomplete transfer. 
[ 1996.933075] env[63379]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-6d5f2568-0884-4148-8873-f21fccd05434 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.942834] env[63379]: DEBUG oslo_vmware.rw_handles [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e7dbe3-7148-de6b-05db-f1e21351528a/disk-0.vmdk. {{(pid=63379) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1996.943118] env[63379]: DEBUG nova.virt.vmwareapi.images [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Uploaded image 3959d3db-eff7-402f-81f5-8f67a00a1f20 to the Glance image server {{(pid=63379) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1996.947188] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Destroying the VM {{(pid=63379) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1996.947548] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-cdceb2fc-3807-403d-a3d9-c9e244e96b22 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.955897] env[63379]: DEBUG oslo_vmware.api [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Waiting for the task: (returnval){ [ 1996.955897] env[63379]: value = "task-1780419" [ 1996.955897] env[63379]: _type = "Task" [ 1996.955897] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1996.967284] env[63379]: DEBUG oslo_vmware.api [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780419, 'name': Destroy_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1997.024088] env[63379]: INFO nova.compute.manager [-] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Took 1.53 seconds to deallocate network for instance. [ 1997.041245] env[63379]: DEBUG oslo_concurrency.lockutils [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Releasing lock "refresh_cache-8b9f070e-11d3-4e2d-a0ce-54bb939a36ff" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1997.192510] env[63379]: DEBUG oslo_vmware.api [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780417, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.802159} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1997.192784] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 85ecb409-ab53-43d9-8120-2f8c7402d74c/85ecb409-ab53-43d9-8120-2f8c7402d74c.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1997.192985] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1997.193583] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b7b849b4-9c9f-4179-87c1-9a3104072f5f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.201595] env[63379]: DEBUG oslo_vmware.api [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 1997.201595] env[63379]: value = "task-1780420" [ 1997.201595] env[63379]: _type = "Task" [ 1997.201595] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1997.210152] env[63379]: DEBUG oslo_vmware.api [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780420, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1997.349247] env[63379]: DEBUG oslo_vmware.api [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780418, 'name': PowerOffVM_Task, 'duration_secs': 0.234899} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1997.349482] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1997.349674] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Updating instance 'df8d513d-c201-4ffe-894e-cf8c3318cecc' progress to 17 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1997.465813] env[63379]: DEBUG oslo_vmware.api [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780419, 'name': Destroy_Task, 'duration_secs': 0.448645} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1997.466209] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Destroyed the VM [ 1997.466534] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Deleting Snapshot of the VM instance {{(pid=63379) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1997.466800] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-802d5f91-a2fa-4da3-9fd9-da93227ae773 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.473258] env[63379]: DEBUG oslo_vmware.api [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Waiting for the task: (returnval){ [ 1997.473258] env[63379]: value = "task-1780421" [ 1997.473258] env[63379]: _type = "Task" [ 1997.473258] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1997.480869] env[63379]: DEBUG oslo_vmware.api [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780421, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1997.530611] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2fdd3d94-034d-4763-a3d8-2ac3f517e416 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1997.530924] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2fdd3d94-034d-4763-a3d8-2ac3f517e416 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1997.531179] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2fdd3d94-034d-4763-a3d8-2ac3f517e416 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1997.557294] env[63379]: INFO nova.scheduler.client.report [None req-2fdd3d94-034d-4763-a3d8-2ac3f517e416 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Deleted allocations for instance 1c983c16-6f86-4932-9698-7fb1428ca231 [ 1997.711704] env[63379]: DEBUG oslo_vmware.api [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 
tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780420, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060238} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1997.712109] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1997.712949] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a550a690-3152-419f-8863-ea765b31aa4b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.735257] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Reconfiguring VM instance instance-00000072 to attach disk [datastore1] 85ecb409-ab53-43d9-8120-2f8c7402d74c/85ecb409-ab53-43d9-8120-2f8c7402d74c.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1997.735602] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2b376cca-38e1-4638-9b42-e43ff5753bee {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.755307] env[63379]: DEBUG oslo_vmware.api [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 1997.755307] env[63379]: value = "task-1780422" [ 1997.755307] env[63379]: _type = "Task" [ 1997.755307] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1997.766587] env[63379]: DEBUG oslo_vmware.api [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780422, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1997.858114] env[63379]: DEBUG nova.virt.hardware [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1997.858114] env[63379]: DEBUG nova.virt.hardware [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1997.858114] env[63379]: DEBUG nova.virt.hardware [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1997.858114] env[63379]: DEBUG nova.virt.hardware [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1997.858114] env[63379]: DEBUG nova.virt.hardware [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1997.858114] env[63379]: DEBUG nova.virt.hardware [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1997.858114] env[63379]: DEBUG nova.virt.hardware [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1997.858114] env[63379]: DEBUG nova.virt.hardware [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1997.858114] env[63379]: DEBUG nova.virt.hardware [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 
tempest-ServerActionsTestOtherB-1503948534-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1997.858114] env[63379]: DEBUG nova.virt.hardware [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1997.860030] env[63379]: DEBUG nova.virt.hardware [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1997.867330] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-efd250ee-e943-4e35-815b-777a70750f43 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.885047] env[63379]: DEBUG oslo_vmware.api [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 1997.885047] env[63379]: value = "task-1780423" [ 1997.885047] env[63379]: _type = "Task" [ 1997.885047] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1997.896189] env[63379]: DEBUG oslo_vmware.api [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780423, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1997.987250] env[63379]: DEBUG oslo_vmware.api [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780421, 'name': RemoveSnapshot_Task, 'duration_secs': 0.371614} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1997.987725] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Deleted Snapshot of the VM instance {{(pid=63379) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1997.988246] env[63379]: DEBUG nova.compute.manager [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1997.989466] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ac2f1ff-6d84-47f2-8bc7-ab05aaacebbf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.064451] env[63379]: DEBUG oslo_concurrency.lockutils [None req-2fdd3d94-034d-4763-a3d8-2ac3f517e416 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "1c983c16-6f86-4932-9698-7fb1428ca231" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.104s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1998.266694] env[63379]: DEBUG oslo_vmware.api [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780422, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1998.394764] env[63379]: DEBUG oslo_vmware.api [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780423, 'name': ReconfigVM_Task, 'duration_secs': 0.374623} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1998.395098] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Updating instance 'df8d513d-c201-4ffe-894e-cf8c3318cecc' progress to 33 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1998.504067] env[63379]: INFO nova.compute.manager [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Shelve offloading [ 1998.505883] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1998.506172] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c3150e26-9118-4729-b667-19c821f36fc8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.513644] env[63379]: DEBUG oslo_vmware.api [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Waiting for the task: (returnval){ [ 1998.513644] env[63379]: value = "task-1780424" [ 1998.513644] env[63379]: _type = "Task" [ 1998.513644] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1998.522012] env[63379]: DEBUG oslo_vmware.api [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780424, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1998.555397] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47e091aa-6e61-4f8b-99f3-76ab8116eafc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.573438] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Updating instance '8b9f070e-11d3-4e2d-a0ce-54bb939a36ff' progress to 0 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1998.767261] env[63379]: DEBUG oslo_vmware.api [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780422, 'name': ReconfigVM_Task, 'duration_secs': 0.556563} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1998.767552] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Reconfigured VM instance instance-00000072 to attach disk [datastore1] 85ecb409-ab53-43d9-8120-2f8c7402d74c/85ecb409-ab53-43d9-8120-2f8c7402d74c.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1998.768212] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2e3003a4-349c-454f-849b-cd48aacae947 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.774478] env[63379]: DEBUG oslo_vmware.api [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 1998.774478] env[63379]: value = "task-1780425" [ 1998.774478] env[63379]: _type = "Task" [ 1998.774478] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1998.782201] env[63379]: DEBUG oslo_vmware.api [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780425, 'name': Rename_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1998.901788] env[63379]: DEBUG nova.virt.hardware [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1998.902027] env[63379]: DEBUG nova.virt.hardware [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1998.902218] env[63379]: DEBUG nova.virt.hardware [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1998.902458] env[63379]: DEBUG nova.virt.hardware [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1998.902563] env[63379]: DEBUG 
nova.virt.hardware [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1998.902715] env[63379]: DEBUG nova.virt.hardware [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1998.902932] env[63379]: DEBUG nova.virt.hardware [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1998.903142] env[63379]: DEBUG nova.virt.hardware [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1998.903294] env[63379]: DEBUG nova.virt.hardware [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1998.903468] env[63379]: DEBUG nova.virt.hardware [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1998.903648] env[63379]: DEBUG nova.virt.hardware [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1998.909102] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Reconfiguring VM instance instance-0000006c to detach disk 2000 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1998.909434] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a5695549-1dfc-46f9-9e77-bccb51c891f4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.929758] env[63379]: DEBUG oslo_vmware.api [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 1998.929758] env[63379]: value = "task-1780426" [ 1998.929758] env[63379]: _type = "Task" [ 1998.929758] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1998.938209] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "0b06665f-befc-4fa3-9eef-2c2f74ba382f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1998.938467] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "0b06665f-befc-4fa3-9eef-2c2f74ba382f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1998.945547] env[63379]: DEBUG oslo_vmware.api [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780426, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1999.026648] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] VM already powered off {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1999.026863] env[63379]: DEBUG nova.compute.manager [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1999.027715] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72e062f7-51ef-4037-91b5-1ef4d1c5bf37 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.036195] env[63379]: DEBUG oslo_concurrency.lockutils [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Acquiring lock "refresh_cache-10fc842d-b821-4103-b6a5-f5b2fc46ea74" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1999.036385] env[63379]: DEBUG oslo_concurrency.lockutils [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Acquired lock "refresh_cache-10fc842d-b821-4103-b6a5-f5b2fc46ea74" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1999.036578] env[63379]: DEBUG nova.network.neutron [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1999.079719] 
env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1999.080485] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2d001ac2-5c97-45ff-9f80-6558b33dfe52 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.089417] env[63379]: DEBUG oslo_vmware.api [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 1999.089417] env[63379]: value = "task-1780427" [ 1999.089417] env[63379]: _type = "Task" [ 1999.089417] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1999.099023] env[63379]: DEBUG oslo_vmware.api [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780427, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1999.285020] env[63379]: DEBUG oslo_vmware.api [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780425, 'name': Rename_Task, 'duration_secs': 0.132363} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1999.285406] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1999.285719] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2f4d71f8-c337-49d8-b30c-0dbdaa12f4de {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.292343] env[63379]: DEBUG oslo_vmware.api [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 1999.292343] env[63379]: value = "task-1780428" [ 1999.292343] env[63379]: _type = "Task" [ 1999.292343] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1999.300688] env[63379]: DEBUG oslo_vmware.api [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780428, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1999.442782] env[63379]: DEBUG nova.compute.manager [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Starting instance... 
{{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1999.445724] env[63379]: DEBUG oslo_vmware.api [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780426, 'name': ReconfigVM_Task, 'duration_secs': 0.236262} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1999.445985] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Reconfigured VM instance instance-0000006c to detach disk 2000 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1999.447069] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5552331c-8b06-4260-a588-f0d6e6eee1f4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.475238] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] df8d513d-c201-4ffe-894e-cf8c3318cecc/df8d513d-c201-4ffe-894e-cf8c3318cecc.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1999.475567] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4f19b1fe-43a8-420f-b355-d73cc9e69e93 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.494867] env[63379]: DEBUG oslo_vmware.api [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 1999.494867] env[63379]: value = "task-1780429" [ 1999.494867] env[63379]: _type = "Task" [ 1999.494867] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1999.503633] env[63379]: DEBUG oslo_vmware.api [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780429, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1999.600095] env[63379]: DEBUG oslo_vmware.api [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780427, 'name': PowerOffVM_Task, 'duration_secs': 0.201498} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1999.600361] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1999.600558] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Updating instance '8b9f070e-11d3-4e2d-a0ce-54bb939a36ff' progress to 17 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1999.750380] env[63379]: DEBUG nova.network.neutron [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Updating instance_info_cache with network_info: [{"id": "7dc69df5-b3d2-494c-b700-584c31779f9a", "address": "fa:16:3e:b3:a2:43", "network": {"id": "678e0600-c1d0-4fb0-8219-a81a1ca0f4f0", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1244921057-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28f7e38c300546a2a7a033cb12c7f89a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1895250-76cc-41f7-b7f8-2e5679494607", "external-id": "nsx-vlan-transportzone-785", "segmentation_id": 785, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dc69df5-b3", "ovs_interfaceid": "7dc69df5-b3d2-494c-b700-584c31779f9a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1999.803063] env[63379]: DEBUG oslo_vmware.api [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780428, 'name': PowerOnVM_Task, 'duration_secs': 0.438817} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1999.803363] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1999.803608] env[63379]: INFO nova.compute.manager [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Took 7.96 seconds to spawn the instance on the hypervisor. 
[ 1999.803813] env[63379]: DEBUG nova.compute.manager [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1999.804665] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b34c06d-1088-476a-b5d7-4e256fa78ada {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.966202] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1999.966419] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1999.967938] env[63379]: INFO nova.compute.claims [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2000.006932] env[63379]: DEBUG oslo_vmware.api [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780429, 'name': ReconfigVM_Task, 'duration_secs': 0.323156} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2000.007816] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Reconfigured VM instance instance-0000006c to attach disk [datastore1] df8d513d-c201-4ffe-894e-cf8c3318cecc/df8d513d-c201-4ffe-894e-cf8c3318cecc.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2000.008105] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Updating instance 'df8d513d-c201-4ffe-894e-cf8c3318cecc' progress to 50 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2000.108736] env[63379]: DEBUG nova.virt.hardware [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2000.108986] env[63379]: DEBUG nova.virt.hardware [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2000.109168] env[63379]: DEBUG nova.virt.hardware [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2000.109359] env[63379]: DEBUG nova.virt.hardware [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2000.109511] env[63379]: DEBUG nova.virt.hardware [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2000.109661] env[63379]: DEBUG nova.virt.hardware [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2000.109864] env[63379]: DEBUG 
nova.virt.hardware [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2000.110041] env[63379]: DEBUG nova.virt.hardware [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2000.110220] env[63379]: DEBUG nova.virt.hardware [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2000.110388] env[63379]: DEBUG nova.virt.hardware [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2000.110578] env[63379]: DEBUG nova.virt.hardware [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2000.115799] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9c05322a-b319-4b86-8973-cf4403e69b0c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.134485] env[63379]: DEBUG oslo_vmware.api [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 2000.134485] env[63379]: value = "task-1780430" [ 2000.134485] env[63379]: _type = "Task" [ 2000.134485] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2000.143498] env[63379]: DEBUG oslo_vmware.api [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780430, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2000.253133] env[63379]: DEBUG oslo_concurrency.lockutils [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Releasing lock "refresh_cache-10fc842d-b821-4103-b6a5-f5b2fc46ea74" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2000.321371] env[63379]: INFO nova.compute.manager [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Took 12.80 seconds to build instance. 
[ 2000.515479] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b73bb33-0878-468a-b27d-db40af643d5d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.539800] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d58ad877-c1dd-4396-bca9-ead276cdeec0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.562169] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Updating instance 'df8d513d-c201-4ffe-894e-cf8c3318cecc' progress to 67 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2000.616255] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2000.617175] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fda13ff-803c-464c-af13-06b97f3296cc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.623976] env[63379]: DEBUG nova.compute.manager [req-66fe163f-ceb7-470f-8507-ba8ff4194da1 req-f4618259-6846-4199-86da-409681f460be service nova] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Received event network-vif-unplugged-7dc69df5-b3d2-494c-b700-584c31779f9a {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2000.624378] env[63379]: DEBUG oslo_concurrency.lockutils [req-66fe163f-ceb7-470f-8507-ba8ff4194da1 req-f4618259-6846-4199-86da-409681f460be service nova] Acquiring lock "10fc842d-b821-4103-b6a5-f5b2fc46ea74-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2000.624769] env[63379]: DEBUG oslo_concurrency.lockutils [req-66fe163f-ceb7-470f-8507-ba8ff4194da1 req-f4618259-6846-4199-86da-409681f460be service nova] Lock "10fc842d-b821-4103-b6a5-f5b2fc46ea74-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2000.625133] env[63379]: DEBUG oslo_concurrency.lockutils [req-66fe163f-ceb7-470f-8507-ba8ff4194da1 req-f4618259-6846-4199-86da-409681f460be service nova] Lock "10fc842d-b821-4103-b6a5-f5b2fc46ea74-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2000.625500] env[63379]: DEBUG nova.compute.manager [req-66fe163f-ceb7-470f-8507-ba8ff4194da1 req-f4618259-6846-4199-86da-409681f460be service nova] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] No waiting events found dispatching network-vif-unplugged-7dc69df5-b3d2-494c-b700-584c31779f9a {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 
2000.625830] env[63379]: WARNING nova.compute.manager [req-66fe163f-ceb7-470f-8507-ba8ff4194da1 req-f4618259-6846-4199-86da-409681f460be service nova] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Received unexpected event network-vif-unplugged-7dc69df5-b3d2-494c-b700-584c31779f9a for instance with vm_state shelved and task_state shelving_offloading. [ 2000.633116] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2000.633502] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d4200b01-4200-4ff6-b4e6-79b762021dde {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.651899] env[63379]: DEBUG oslo_vmware.api [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780430, 'name': ReconfigVM_Task, 'duration_secs': 0.180074} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2000.652445] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Updating instance '8b9f070e-11d3-4e2d-a0ce-54bb939a36ff' progress to 33 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2000.740592] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2000.740860] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2000.741064] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Deleting the datastore file [datastore1] 10fc842d-b821-4103-b6a5-f5b2fc46ea74 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2000.741349] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d0603dea-4647-424e-a1fa-c80b0b5f3426 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.748013] env[63379]: DEBUG oslo_vmware.api [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Waiting for the task: (returnval){ [ 2000.748013] env[63379]: value = "task-1780432" [ 2000.748013] env[63379]: _type = "Task" [ 2000.748013] env[63379]: } to 
complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2000.757979] env[63379]: DEBUG oslo_vmware.api [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780432, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2000.823493] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4a3d13d8-ac08-4705-b828-ef7dabec4234 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "85ecb409-ab53-43d9-8120-2f8c7402d74c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.311s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2001.112178] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67328576-e366-47c4-987a-f2bdd9a4893f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.121021] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33a0ee30-893f-4e43-b0e4-bd6cd4aaa409 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.156107] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ee9a8cb-0d33-407e-9ed6-670cd7893744 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.160436] env[63379]: DEBUG nova.virt.hardware [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2001.160670] env[63379]: DEBUG nova.virt.hardware [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2001.160838] env[63379]: DEBUG nova.virt.hardware [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2001.161172] env[63379]: DEBUG nova.virt.hardware [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 2001.161172] env[63379]: DEBUG nova.virt.hardware [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2001.161331] env[63379]: DEBUG nova.virt.hardware [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2001.161541] env[63379]: DEBUG nova.virt.hardware [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2001.164929] env[63379]: DEBUG nova.virt.hardware [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2001.164929] env[63379]: DEBUG nova.virt.hardware [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2001.164929] env[63379]: DEBUG nova.virt.hardware [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2001.164929] env[63379]: DEBUG nova.virt.hardware [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2001.167373] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Reconfiguring VM instance instance-00000071 to detach disk 2000 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2001.167654] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fc6b437c-bb05-47b4-88da-fd27b36bd48a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.186785] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a958ee67-29d9-4e28-b737-52c736fe03ef {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.191418] env[63379]: DEBUG oslo_vmware.api [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e 
tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 2001.191418] env[63379]: value = "task-1780433" [ 2001.191418] env[63379]: _type = "Task" [ 2001.191418] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2001.202498] env[63379]: DEBUG nova.compute.provider_tree [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2001.209684] env[63379]: DEBUG oslo_vmware.api [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780433, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2001.259695] env[63379]: DEBUG oslo_vmware.api [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780432, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.251666} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2001.259964] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2001.260184] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2001.260370] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2001.275747] env[63379]: INFO nova.scheduler.client.report [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Deleted allocations for instance 10fc842d-b821-4103-b6a5-f5b2fc46ea74 [ 2001.705027] env[63379]: DEBUG oslo_vmware.api [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780433, 'name': ReconfigVM_Task, 'duration_secs': 0.167199} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2001.705395] env[63379]: DEBUG nova.scheduler.client.report [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2001.708521] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Reconfigured VM instance instance-00000071 to detach disk 2000 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2001.711117] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f529fe14-0c2f-481c-8245-ce195ce1a294 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.735343] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] volume-eeee8c0e-4fab-40d1-86c6-51050b04b159/volume-eeee8c0e-4fab-40d1-86c6-51050b04b159.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2001.736545] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e6715e4e-885b-4f33-990a-e39c307775cb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.756520] env[63379]: DEBUG oslo_vmware.api [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 2001.756520] env[63379]: value = "task-1780434" [ 2001.756520] env[63379]: _type = "Task" [ 2001.756520] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2001.766245] env[63379]: DEBUG oslo_vmware.api [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780434, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2001.772667] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "20649b93-78ac-4805-aa24-5dbfef9d766b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2001.773163] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "20649b93-78ac-4805-aa24-5dbfef9d766b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2001.782078] env[63379]: DEBUG oslo_concurrency.lockutils [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2002.210319] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.244s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2002.210916] env[63379]: DEBUG nova.compute.manager [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Start building networks asynchronously for instance. 
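[editor's note] The paired "Acquiring lock … / Lock … acquired … waited N s / … released … held N s" entries above come from oslo.concurrency's lockutils, which serializes work such as resource-tracker updates on the "compute_resources" lock. A minimal sketch of that serialization pattern using only the public oslo.concurrency API; the guarded function bodies are hypothetical placeholders.

```python
from oslo_concurrency import lockutils


# Serialize resource-tracker updates the way the "compute_resources"
# entries above do; the decorator emits the acquire/release debug lines
# (waited/held times) seen in this log.
@lockutils.synchronized('compute_resources')
def update_usage(instance_uuid):
    # hypothetical body: adjust claimed CPU/RAM/disk for the instance
    pass


# Equivalent context-manager form for ad-hoc critical sections, e.g. the
# per-instance build lock acquired before _locked_do_build_and_run_instance.
def build_instance(instance_uuid):
    with lockutils.lock(instance_uuid):
        # hypothetical: build-and-run work for this instance
        pass
```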
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2002.213850] env[63379]: DEBUG oslo_concurrency.lockutils [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.432s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2002.214110] env[63379]: DEBUG nova.objects.instance [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Lazy-loading 'resources' on Instance uuid 10fc842d-b821-4103-b6a5-f5b2fc46ea74 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2002.252434] env[63379]: DEBUG nova.network.neutron [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Port 43a4d9a7-51c7-4dbd-8864-2e6fbcb7c13e binding to destination host cpu-1 is already ACTIVE {{(pid=63379) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2002.267075] env[63379]: DEBUG oslo_vmware.api [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780434, 'name': ReconfigVM_Task, 'duration_secs': 0.248832} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2002.267409] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Reconfigured VM instance instance-00000071 to attach disk [datastore1] volume-eeee8c0e-4fab-40d1-86c6-51050b04b159/volume-eeee8c0e-4fab-40d1-86c6-51050b04b159.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2002.267727] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Updating instance '8b9f070e-11d3-4e2d-a0ce-54bb939a36ff' progress to 50 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2002.275616] env[63379]: DEBUG nova.compute.manager [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Starting instance... 
{{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2002.685612] env[63379]: DEBUG nova.compute.manager [req-23ffe79a-c538-4fc1-bb93-bbe816535ce1 req-c8a5c89f-91ba-4fa9-9094-4550a3c49557 service nova] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Received event network-changed-7dc69df5-b3d2-494c-b700-584c31779f9a {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2002.685838] env[63379]: DEBUG nova.compute.manager [req-23ffe79a-c538-4fc1-bb93-bbe816535ce1 req-c8a5c89f-91ba-4fa9-9094-4550a3c49557 service nova] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Refreshing instance network info cache due to event network-changed-7dc69df5-b3d2-494c-b700-584c31779f9a. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 2002.686148] env[63379]: DEBUG oslo_concurrency.lockutils [req-23ffe79a-c538-4fc1-bb93-bbe816535ce1 req-c8a5c89f-91ba-4fa9-9094-4550a3c49557 service nova] Acquiring lock "refresh_cache-10fc842d-b821-4103-b6a5-f5b2fc46ea74" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2002.686320] env[63379]: DEBUG oslo_concurrency.lockutils [req-23ffe79a-c538-4fc1-bb93-bbe816535ce1 req-c8a5c89f-91ba-4fa9-9094-4550a3c49557 service nova] Acquired lock "refresh_cache-10fc842d-b821-4103-b6a5-f5b2fc46ea74" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2002.686495] env[63379]: DEBUG nova.network.neutron [req-23ffe79a-c538-4fc1-bb93-bbe816535ce1 req-c8a5c89f-91ba-4fa9-9094-4550a3c49557 service nova] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Refreshing network info cache for port 7dc69df5-b3d2-494c-b700-584c31779f9a {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2002.717529] env[63379]: DEBUG nova.compute.utils [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2002.719105] env[63379]: DEBUG nova.compute.manager [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2002.719285] env[63379]: DEBUG nova.network.neutron [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2002.721714] env[63379]: DEBUG nova.objects.instance [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Lazy-loading 'numa_topology' on Instance uuid 10fc842d-b821-4103-b6a5-f5b2fc46ea74 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2002.774776] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-487a599d-0f7c-4fd8-9c2a-cc7d36b0afde {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.800032] env[63379]: DEBUG nova.policy [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'deef4f9ae0754a6c8a7f673c10a76408', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8c01c5c8c3734c4ea066324e542e7374', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 2002.802493] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2002.803338] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d7b4db4-83a1-4c3c-b650-a62ee6bbbfce {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.825131] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Updating instance '8b9f070e-11d3-4e2d-a0ce-54bb939a36ff' progress to 67 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2003.109321] env[63379]: DEBUG nova.network.neutron [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Successfully created port: 61782886-48c0-44e0-a33b-122b4323cfe0 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2003.224516] env[63379]: DEBUG nova.compute.manager [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] 
Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2003.227096] env[63379]: DEBUG nova.objects.base [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Object Instance<10fc842d-b821-4103-b6a5-f5b2fc46ea74> lazy-loaded attributes: resources,numa_topology {{(pid=63379) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2003.288764] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "df8d513d-c201-4ffe-894e-cf8c3318cecc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2003.288764] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "df8d513d-c201-4ffe-894e-cf8c3318cecc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2003.288764] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "df8d513d-c201-4ffe-894e-cf8c3318cecc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2003.394421] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d427fdce-fe82-425d-9500-41a34991e1bf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.403054] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d550dd8-4f0c-41bf-9f2a-bd7cb759003e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.435521] env[63379]: DEBUG nova.network.neutron [req-23ffe79a-c538-4fc1-bb93-bbe816535ce1 req-c8a5c89f-91ba-4fa9-9094-4550a3c49557 service nova] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Updated VIF entry in instance network info cache for port 7dc69df5-b3d2-494c-b700-584c31779f9a. 
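[editor's note] The entries above show an external "network-changed-<port>" event triggering a refresh of the instance's network info cache under a per-instance "refresh_cache-<uuid>" lock, after which the VIF entry for that port is updated. A rough, hypothetical sketch of that event-driven cache refresh; the in-memory dict cache and the `show_port` callable are assumptions standing in for the persisted info_cache and a Neutron client wrapper.

```python
from oslo_concurrency import lockutils

# Hypothetical in-memory cache keyed by instance UUID; the real code
# persists this as the instance's info_cache.
_network_info_cache: dict[str, list[dict]] = {}


def handle_network_changed(instance_uuid, port_id, show_port):
    """Refresh cached VIF data for one port when a network-changed
    event arrives, mirroring the 'Refreshing network info cache for
    port ...' entries above."""
    with lockutils.lock(f"refresh_cache-{instance_uuid}"):
        port = show_port(port_id)          # assumed port lookup
        vifs = _network_info_cache.setdefault(instance_uuid, [])
        # drop the stale entry for this port, keep the others
        vifs[:] = [v for v in vifs if v.get("id") != port_id]
        vifs.append({"id": port_id,
                     "address": port.get("mac_address"),
                     "active": port.get("status") == "ACTIVE"})
```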
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2003.436064] env[63379]: DEBUG nova.network.neutron [req-23ffe79a-c538-4fc1-bb93-bbe816535ce1 req-c8a5c89f-91ba-4fa9-9094-4550a3c49557 service nova] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Updating instance_info_cache with network_info: [{"id": "7dc69df5-b3d2-494c-b700-584c31779f9a", "address": "fa:16:3e:b3:a2:43", "network": {"id": "678e0600-c1d0-4fb0-8219-a81a1ca0f4f0", "bridge": null, "label": "tempest-ServersNegativeTestJSON-1244921057-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28f7e38c300546a2a7a033cb12c7f89a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap7dc69df5-b3", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2003.437977] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36e842a7-83af-4e1c-b310-cf98fdab0c23 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.446584] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03f933ec-46af-4219-88cf-3d90cefbf604 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.461409] env[63379]: DEBUG nova.compute.provider_tree [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2003.787575] env[63379]: DEBUG oslo_concurrency.lockutils [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Acquiring lock "10fc842d-b821-4103-b6a5-f5b2fc46ea74" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2003.941450] env[63379]: DEBUG oslo_concurrency.lockutils [req-23ffe79a-c538-4fc1-bb93-bbe816535ce1 req-c8a5c89f-91ba-4fa9-9094-4550a3c49557 service nova] Releasing lock "refresh_cache-10fc842d-b821-4103-b6a5-f5b2fc46ea74" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2003.980118] env[63379]: ERROR nova.scheduler.client.report [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 
tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [req-efca3ef7-621f-42e5-90a5-628ea9a82ce3] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID cf478c89-515f-4372-b90f-4868ab56e978. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-efca3ef7-621f-42e5-90a5-628ea9a82ce3"}]} [ 2003.996962] env[63379]: DEBUG nova.scheduler.client.report [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Refreshing inventories for resource provider cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2004.010316] env[63379]: DEBUG nova.scheduler.client.report [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Updating ProviderTree inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2004.010558] env[63379]: DEBUG nova.compute.provider_tree [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2004.020922] env[63379]: DEBUG nova.scheduler.client.report [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Refreshing aggregate associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, aggregates: None {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2004.037642] env[63379]: DEBUG nova.scheduler.client.report [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Refreshing trait associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, traits: 
HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2004.189199] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37d90118-e661-4d8a-8566-cc1d94fd0b4a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.197423] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba11cd93-1899-4ade-a684-b09f010934a2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.227418] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba8a0372-df8e-4ff0-9fe8-8cd4a323b17a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.234880] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9d9609e-8e39-48c1-b850-ca1c2e405016 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.240842] env[63379]: DEBUG nova.compute.manager [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2004.252240] env[63379]: DEBUG nova.compute.provider_tree [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2004.265108] env[63379]: DEBUG nova.virt.hardware [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2004.265353] env[63379]: 
DEBUG nova.virt.hardware [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2004.265520] env[63379]: DEBUG nova.virt.hardware [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2004.265710] env[63379]: DEBUG nova.virt.hardware [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2004.265861] env[63379]: DEBUG nova.virt.hardware [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2004.266025] env[63379]: DEBUG nova.virt.hardware [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2004.266275] env[63379]: DEBUG nova.virt.hardware [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2004.266447] env[63379]: DEBUG nova.virt.hardware [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2004.266618] env[63379]: DEBUG nova.virt.hardware [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2004.266785] env[63379]: DEBUG nova.virt.hardware [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2004.266962] env[63379]: DEBUG nova.virt.hardware [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2004.267931] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-318fee5c-f37b-4d19-99f0-f28752eb0ee7 {{(pid=63379) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.275506] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12d10343-0f81-4570-9c95-7028c9ee0158 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.320541] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "refresh_cache-df8d513d-c201-4ffe-894e-cf8c3318cecc" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2004.320781] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquired lock "refresh_cache-df8d513d-c201-4ffe-894e-cf8c3318cecc" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2004.321021] env[63379]: DEBUG nova.network.neutron [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2004.469750] env[63379]: DEBUG nova.network.neutron [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Port 05160396-15ed-49fa-b2de-3793f1f45863 binding to destination host cpu-1 is already ACTIVE {{(pid=63379) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2004.596617] env[63379]: DEBUG nova.network.neutron [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Successfully updated port: 61782886-48c0-44e0-a33b-122b4323cfe0 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2004.711431] env[63379]: DEBUG nova.compute.manager [req-0c149d38-aaaf-4fe2-9118-a8db8727e949 req-9cef16ee-c467-4629-b37b-7ecbb65ada6b service nova] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Received event network-vif-plugged-61782886-48c0-44e0-a33b-122b4323cfe0 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2004.711431] env[63379]: DEBUG oslo_concurrency.lockutils [req-0c149d38-aaaf-4fe2-9118-a8db8727e949 req-9cef16ee-c467-4629-b37b-7ecbb65ada6b service nova] Acquiring lock "0b06665f-befc-4fa3-9eef-2c2f74ba382f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2004.711580] env[63379]: DEBUG oslo_concurrency.lockutils [req-0c149d38-aaaf-4fe2-9118-a8db8727e949 req-9cef16ee-c467-4629-b37b-7ecbb65ada6b service nova] Lock "0b06665f-befc-4fa3-9eef-2c2f74ba382f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2004.711751] env[63379]: DEBUG oslo_concurrency.lockutils 
[req-0c149d38-aaaf-4fe2-9118-a8db8727e949 req-9cef16ee-c467-4629-b37b-7ecbb65ada6b service nova] Lock "0b06665f-befc-4fa3-9eef-2c2f74ba382f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2004.711922] env[63379]: DEBUG nova.compute.manager [req-0c149d38-aaaf-4fe2-9118-a8db8727e949 req-9cef16ee-c467-4629-b37b-7ecbb65ada6b service nova] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] No waiting events found dispatching network-vif-plugged-61782886-48c0-44e0-a33b-122b4323cfe0 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2004.712109] env[63379]: WARNING nova.compute.manager [req-0c149d38-aaaf-4fe2-9118-a8db8727e949 req-9cef16ee-c467-4629-b37b-7ecbb65ada6b service nova] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Received unexpected event network-vif-plugged-61782886-48c0-44e0-a33b-122b4323cfe0 for instance with vm_state building and task_state spawning. [ 2004.712280] env[63379]: DEBUG nova.compute.manager [req-0c149d38-aaaf-4fe2-9118-a8db8727e949 req-9cef16ee-c467-4629-b37b-7ecbb65ada6b service nova] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Received event network-changed-61782886-48c0-44e0-a33b-122b4323cfe0 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2004.712440] env[63379]: DEBUG nova.compute.manager [req-0c149d38-aaaf-4fe2-9118-a8db8727e949 req-9cef16ee-c467-4629-b37b-7ecbb65ada6b service nova] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Refreshing instance network info cache due to event network-changed-61782886-48c0-44e0-a33b-122b4323cfe0. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 2004.712620] env[63379]: DEBUG oslo_concurrency.lockutils [req-0c149d38-aaaf-4fe2-9118-a8db8727e949 req-9cef16ee-c467-4629-b37b-7ecbb65ada6b service nova] Acquiring lock "refresh_cache-0b06665f-befc-4fa3-9eef-2c2f74ba382f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2004.712756] env[63379]: DEBUG oslo_concurrency.lockutils [req-0c149d38-aaaf-4fe2-9118-a8db8727e949 req-9cef16ee-c467-4629-b37b-7ecbb65ada6b service nova] Acquired lock "refresh_cache-0b06665f-befc-4fa3-9eef-2c2f74ba382f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2004.712915] env[63379]: DEBUG nova.network.neutron [req-0c149d38-aaaf-4fe2-9118-a8db8727e949 req-9cef16ee-c467-4629-b37b-7ecbb65ada6b service nova] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Refreshing network info cache for port 61782886-48c0-44e0-a33b-122b4323cfe0 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2004.781336] env[63379]: DEBUG nova.scheduler.client.report [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Updated inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 with generation 170 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2004.781666] env[63379]: DEBUG nova.compute.provider_tree [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Updating resource provider cf478c89-515f-4372-b90f-4868ab56e978 generation from 170 to 171 during operation: update_inventory {{(pid=63379) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2004.781873] env[63379]: DEBUG nova.compute.provider_tree [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2005.046097] env[63379]: DEBUG nova.network.neutron [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Updating instance_info_cache with network_info: [{"id": "43a4d9a7-51c7-4dbd-8864-2e6fbcb7c13e", "address": "fa:16:3e:2d:9c:d3", "network": {"id": "0dd98be0-5b25-4e45-ac38-4b8d3cd9fc6c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-191573180-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "645f0e0a5e1a44d59ca9c85da49bb454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43a4d9a7-51", "ovs_interfaceid": "43a4d9a7-51c7-4dbd-8864-2e6fbcb7c13e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2005.099097] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "refresh_cache-0b06665f-befc-4fa3-9eef-2c2f74ba382f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2005.251479] env[63379]: DEBUG nova.network.neutron [req-0c149d38-aaaf-4fe2-9118-a8db8727e949 req-9cef16ee-c467-4629-b37b-7ecbb65ada6b service nova] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Instance cache missing network info. 
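[editor's note] The sequence above shows a placement inventory write failing with 409 "placement.concurrent_update" (a resource provider generation conflict), the client refreshing inventories/aggregates/traits, and the retried write succeeding while bumping the provider generation from 170 to 171. A minimal sketch of that refresh-and-retry pattern against the Placement inventories endpoint, assuming a pre-authenticated `requests.Session` (with appropriate microversion headers) and a known placement URL; it is not Nova's report client.

```python
import requests


def set_inventory(session: requests.Session, placement_url: str,
                  rp_uuid: str, inventories: dict, max_retries: int = 3):
    """Write a resource provider's inventory, retrying on generation
    conflicts (HTTP 409) by re-reading the current generation first."""
    url = f"{placement_url}/resource_providers/{rp_uuid}/inventories"
    for _ in range(max_retries):
        current = session.get(url).json()
        body = {
            "resource_provider_generation":
                current["resource_provider_generation"],
            "inventories": inventories,
        }
        resp = session.put(url, json=body)
        if resp.status_code == 200:
            return resp.json()     # includes the bumped generation
        if resp.status_code != 409:
            resp.raise_for_status()
        # 409: another writer bumped the generation (as in the log
        # above); loop to refresh the generation and try again.
    raise RuntimeError(f"generation conflict persisted for {rp_uuid}")
```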
{{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2005.286934] env[63379]: DEBUG oslo_concurrency.lockutils [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.073s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2005.291614] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.489s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2005.293109] env[63379]: INFO nova.compute.claims [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2005.331394] env[63379]: DEBUG nova.network.neutron [req-0c149d38-aaaf-4fe2-9118-a8db8727e949 req-9cef16ee-c467-4629-b37b-7ecbb65ada6b service nova] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2005.490506] env[63379]: DEBUG oslo_concurrency.lockutils [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "8b9f070e-11d3-4e2d-a0ce-54bb939a36ff-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2005.490773] env[63379]: DEBUG oslo_concurrency.lockutils [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "8b9f070e-11d3-4e2d-a0ce-54bb939a36ff-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2005.490957] env[63379]: DEBUG oslo_concurrency.lockutils [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "8b9f070e-11d3-4e2d-a0ce-54bb939a36ff-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2005.548487] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Releasing lock "refresh_cache-df8d513d-c201-4ffe-894e-cf8c3318cecc" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2005.802488] env[63379]: DEBUG oslo_concurrency.lockutils [None req-120767e2-0022-4ea2-806f-327b32a6c6d7 tempest-ServersNegativeTestJSON-1202499421 
tempest-ServersNegativeTestJSON-1202499421-project-member] Lock "10fc842d-b821-4103-b6a5-f5b2fc46ea74" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 22.225s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2005.803907] env[63379]: DEBUG oslo_concurrency.lockutils [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Lock "10fc842d-b821-4103-b6a5-f5b2fc46ea74" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 2.016s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2005.803907] env[63379]: INFO nova.compute.manager [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Unshelving [ 2005.835193] env[63379]: DEBUG oslo_concurrency.lockutils [req-0c149d38-aaaf-4fe2-9118-a8db8727e949 req-9cef16ee-c467-4629-b37b-7ecbb65ada6b service nova] Releasing lock "refresh_cache-0b06665f-befc-4fa3-9eef-2c2f74ba382f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2005.835734] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquired lock "refresh_cache-0b06665f-befc-4fa3-9eef-2c2f74ba382f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2005.836016] env[63379]: DEBUG nova.network.neutron [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2006.057180] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f6ccde2-16be-41c9-a999-f0226ee99518 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.064932] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3002785-1e73-45b4-91cb-c9efaa52c67f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.368547] env[63379]: DEBUG nova.network.neutron [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Instance cache missing network info. 
{{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2006.431309] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6a9b1e7-0e7a-41bf-b87e-26fff985cceb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.440483] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b18d6004-f529-4735-b3fc-65bff289d0f3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.474463] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bc9b511-8b26-4951-8a40-a104b03629e7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.483275] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59455793-9502-4e8f-ba27-0aa81a47d7ca {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.499505] env[63379]: DEBUG nova.compute.provider_tree [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2006.510408] env[63379]: DEBUG nova.network.neutron [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Updating instance_info_cache with network_info: [{"id": "61782886-48c0-44e0-a33b-122b4323cfe0", "address": "fa:16:3e:f1:66:51", "network": {"id": "c67e6fb1-ba3e-4494-b459-ecd555f3bf64", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1864563188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c01c5c8c3734c4ea066324e542e7374", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6934071-bf85-4591-9c7d-55c7ea131262", "external-id": "nsx-vlan-transportzone-452", "segmentation_id": 452, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61782886-48", "ovs_interfaceid": "61782886-48c0-44e0-a33b-122b4323cfe0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2006.528867] env[63379]: DEBUG oslo_concurrency.lockutils [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "refresh_cache-8b9f070e-11d3-4e2d-a0ce-54bb939a36ff" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2006.529083] env[63379]: DEBUG 
oslo_concurrency.lockutils [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquired lock "refresh_cache-8b9f070e-11d3-4e2d-a0ce-54bb939a36ff" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2006.529289] env[63379]: DEBUG nova.network.neutron [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2006.826829] env[63379]: DEBUG oslo_concurrency.lockutils [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2007.002692] env[63379]: DEBUG nova.scheduler.client.report [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2007.012727] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Releasing lock "refresh_cache-0b06665f-befc-4fa3-9eef-2c2f74ba382f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2007.013073] env[63379]: DEBUG nova.compute.manager [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Instance network_info: |[{"id": "61782886-48c0-44e0-a33b-122b4323cfe0", "address": "fa:16:3e:f1:66:51", "network": {"id": "c67e6fb1-ba3e-4494-b459-ecd555f3bf64", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1864563188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c01c5c8c3734c4ea066324e542e7374", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6934071-bf85-4591-9c7d-55c7ea131262", "external-id": "nsx-vlan-transportzone-452", "segmentation_id": 452, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61782886-48", "ovs_interfaceid": "61782886-48c0-44e0-a33b-122b4323cfe0", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2007.013568] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f1:66:51', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c6934071-bf85-4591-9c7d-55c7ea131262', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '61782886-48c0-44e0-a33b-122b4323cfe0', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2007.021279] env[63379]: DEBUG oslo.service.loopingcall [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2007.022117] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2007.022366] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a0120758-b3d6-45c7-9376-d5472ade535d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.045735] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2007.045735] env[63379]: value = "task-1780435" [ 2007.045735] env[63379]: _type = "Task" [ 2007.045735] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2007.056263] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780435, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2007.159219] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14765022-6aa5-4fae-92d2-fc715e3d7ab6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.183943] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bddcdabd-dbfa-4036-b651-7d485973d418 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.195022] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Updating instance 'df8d513d-c201-4ffe-894e-cf8c3318cecc' progress to 83 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2007.386615] env[63379]: DEBUG nova.network.neutron [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Updating instance_info_cache with network_info: [{"id": "05160396-15ed-49fa-b2de-3793f1f45863", "address": "fa:16:3e:b8:60:62", "network": {"id": "867cf8d8-4bba-4306-ad6d-632c9dc6863d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-777715300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.247", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a3363a90de2d4d5988ddd03974c10d0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "459b8c74-0aa6-42b6-996a-42b1c5d7e5c6", "external-id": "nsx-vlan-transportzone-467", "segmentation_id": 467, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05160396-15", "ovs_interfaceid": "05160396-15ed-49fa-b2de-3793f1f45863", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2007.507636] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.216s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2007.508295] env[63379]: DEBUG nova.compute.manager [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2007.511359] env[63379]: DEBUG oslo_concurrency.lockutils [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.685s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2007.511617] env[63379]: DEBUG nova.objects.instance [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Lazy-loading 'pci_requests' on Instance uuid 10fc842d-b821-4103-b6a5-f5b2fc46ea74 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2007.557090] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780435, 'name': CreateVM_Task, 'duration_secs': 0.312523} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2007.557354] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2007.557953] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2007.558137] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2007.558535] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2007.558824] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9f34fc5-2a80-4e95-b649-1d32f5f0560c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.564867] env[63379]: DEBUG oslo_vmware.api [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 2007.564867] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5297549d-d9b3-59c1-f45f-8d82d94d0f4e" [ 2007.564867] env[63379]: _type = "Task" [ 2007.564867] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2007.573920] env[63379]: DEBUG oslo_vmware.api [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5297549d-d9b3-59c1-f45f-8d82d94d0f4e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2007.701745] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2007.702083] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-58734b1f-cdd9-4062-8722-97c6e28b88a8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.710997] env[63379]: DEBUG oslo_vmware.api [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 2007.710997] env[63379]: value = "task-1780436" [ 2007.710997] env[63379]: _type = "Task" [ 2007.710997] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2007.719903] env[63379]: DEBUG oslo_vmware.api [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780436, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2007.889995] env[63379]: DEBUG oslo_concurrency.lockutils [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Releasing lock "refresh_cache-8b9f070e-11d3-4e2d-a0ce-54bb939a36ff" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2008.014893] env[63379]: DEBUG nova.compute.utils [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2008.018293] env[63379]: DEBUG nova.objects.instance [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Lazy-loading 'numa_topology' on Instance uuid 10fc842d-b821-4103-b6a5-f5b2fc46ea74 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2008.018917] env[63379]: DEBUG nova.compute.manager [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2008.019119] env[63379]: DEBUG nova.network.neutron [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2008.057823] env[63379]: DEBUG nova.policy [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f2e7c2125f0044508dc4016c4de224e2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9746ae945355479fa5880802e08d2b0a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 2008.076682] env[63379]: DEBUG oslo_vmware.api [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5297549d-d9b3-59c1-f45f-8d82d94d0f4e, 'name': SearchDatastore_Task, 'duration_secs': 0.011603} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2008.077032] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2008.077281] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2008.077572] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2008.077706] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2008.077904] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Creating directory with path 
[datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2008.078182] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5d7be467-784a-45c4-ae18-a1e4858666be {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.088097] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2008.088366] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2008.089110] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d5048ec-a32c-44d6-88b7-c67d3051c222 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.095667] env[63379]: DEBUG oslo_vmware.api [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 2008.095667] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5252073f-8c78-4eca-0c11-38d970c5c2ca" [ 2008.095667] env[63379]: _type = "Task" [ 2008.095667] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2008.105214] env[63379]: DEBUG oslo_vmware.api [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5252073f-8c78-4eca-0c11-38d970c5c2ca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2008.229545] env[63379]: DEBUG oslo_vmware.api [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780436, 'name': PowerOnVM_Task} progress is 74%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2008.386635] env[63379]: DEBUG nova.network.neutron [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Successfully created port: 09723c5d-7ef3-49ea-b024-03afb282b0d5 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2008.399030] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcd8996f-8074-4873-8f78-816b5201f907 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.406725] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6238d3a8-91e2-40fd-8964-179439b0fe0d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.520377] env[63379]: DEBUG nova.compute.manager [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2008.523407] env[63379]: INFO nova.compute.claims [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2008.608033] env[63379]: DEBUG oslo_vmware.api [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5252073f-8c78-4eca-0c11-38d970c5c2ca, 'name': SearchDatastore_Task, 'duration_secs': 0.010131} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2008.608269] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7129018e-0668-44c8-8b44-32f496c0f35a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.615190] env[63379]: DEBUG oslo_vmware.api [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 2008.615190] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]526118ac-c73e-f68b-6120-1377b35c4808" [ 2008.615190] env[63379]: _type = "Task" [ 2008.615190] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2008.625150] env[63379]: DEBUG oslo_vmware.api [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]526118ac-c73e-f68b-6120-1377b35c4808, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2008.722065] env[63379]: DEBUG oslo_vmware.api [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780436, 'name': PowerOnVM_Task, 'duration_secs': 0.658245} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2008.722344] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2008.722536] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a681bbec-11ce-44cb-8a9a-1450cbcd02e6 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Updating instance 'df8d513d-c201-4ffe-894e-cf8c3318cecc' progress to 100 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2009.126150] env[63379]: DEBUG oslo_vmware.api [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]526118ac-c73e-f68b-6120-1377b35c4808, 'name': SearchDatastore_Task, 'duration_secs': 0.030798} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2009.126442] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2009.126723] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 0b06665f-befc-4fa3-9eef-2c2f74ba382f/0b06665f-befc-4fa3-9eef-2c2f74ba382f.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2009.126994] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d789fb34-0a8a-41ba-8652-f76e4c701b4b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.134103] env[63379]: DEBUG oslo_vmware.api [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 2009.134103] env[63379]: value = "task-1780437" [ 2009.134103] env[63379]: _type = "Task" [ 2009.134103] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2009.142456] env[63379]: DEBUG oslo_vmware.api [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780437, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2009.508841] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82cd79e2-9fdd-4e69-beb3-1b0b8d2985ec {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.528730] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3592ba8b-53ca-4a05-b0a5-dfded9481345 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.532574] env[63379]: DEBUG nova.compute.manager [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2009.543564] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Updating instance '8b9f070e-11d3-4e2d-a0ce-54bb939a36ff' progress to 83 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2009.564751] env[63379]: DEBUG nova.virt.hardware [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2009.565031] env[63379]: DEBUG nova.virt.hardware [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2009.565201] env[63379]: DEBUG nova.virt.hardware [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2009.565408] env[63379]: DEBUG nova.virt.hardware [None 
req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2009.565616] env[63379]: DEBUG nova.virt.hardware [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2009.565795] env[63379]: DEBUG nova.virt.hardware [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2009.566088] env[63379]: DEBUG nova.virt.hardware [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2009.566302] env[63379]: DEBUG nova.virt.hardware [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2009.566487] env[63379]: DEBUG nova.virt.hardware [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2009.566667] env[63379]: DEBUG nova.virt.hardware [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2009.566900] env[63379]: DEBUG nova.virt.hardware [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2009.567767] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88500ca5-eea0-4635-80ea-10f6d8bb7b74 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.580410] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8fb203e-4ba0-40f6-b29b-526aec0d4cf5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.649739] env[63379]: DEBUG oslo_vmware.api [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780437, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2009.703719] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3dcffa3-6cac-4f3e-91db-8365350f4580 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.712233] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-488e3b8f-ec86-4e0f-924e-73582cd260a8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.744426] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54c94717-a460-4cd4-b0ca-2dc7c7fc84fb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.752987] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35fdde53-1481-48a7-a861-4de61fe397b7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.768592] env[63379]: DEBUG nova.compute.provider_tree [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2010.041823] env[63379]: DEBUG nova.compute.manager [req-43c2caa8-292a-4ea3-ad81-e056cdf2f0e2 req-ad22e9fd-6675-4cb3-b55a-9384f7ee77cf service nova] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Received event network-vif-plugged-09723c5d-7ef3-49ea-b024-03afb282b0d5 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2010.042092] env[63379]: DEBUG oslo_concurrency.lockutils [req-43c2caa8-292a-4ea3-ad81-e056cdf2f0e2 req-ad22e9fd-6675-4cb3-b55a-9384f7ee77cf service nova] Acquiring lock "20649b93-78ac-4805-aa24-5dbfef9d766b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2010.042331] env[63379]: DEBUG oslo_concurrency.lockutils [req-43c2caa8-292a-4ea3-ad81-e056cdf2f0e2 req-ad22e9fd-6675-4cb3-b55a-9384f7ee77cf service nova] Lock "20649b93-78ac-4805-aa24-5dbfef9d766b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2010.042491] env[63379]: DEBUG oslo_concurrency.lockutils [req-43c2caa8-292a-4ea3-ad81-e056cdf2f0e2 req-ad22e9fd-6675-4cb3-b55a-9384f7ee77cf service nova] Lock "20649b93-78ac-4805-aa24-5dbfef9d766b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2010.042692] env[63379]: DEBUG nova.compute.manager [req-43c2caa8-292a-4ea3-ad81-e056cdf2f0e2 req-ad22e9fd-6675-4cb3-b55a-9384f7ee77cf service nova] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] No waiting events found dispatching network-vif-plugged-09723c5d-7ef3-49ea-b024-03afb282b0d5 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2010.042927] env[63379]: 
WARNING nova.compute.manager [req-43c2caa8-292a-4ea3-ad81-e056cdf2f0e2 req-ad22e9fd-6675-4cb3-b55a-9384f7ee77cf service nova] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Received unexpected event network-vif-plugged-09723c5d-7ef3-49ea-b024-03afb282b0d5 for instance with vm_state building and task_state spawning. [ 2010.050382] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2010.050965] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-90861308-6ee8-4f93-ac41-8067577538bc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.059894] env[63379]: DEBUG oslo_vmware.api [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 2010.059894] env[63379]: value = "task-1780438" [ 2010.059894] env[63379]: _type = "Task" [ 2010.059894] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2010.068487] env[63379]: DEBUG oslo_vmware.api [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780438, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2010.134230] env[63379]: DEBUG nova.network.neutron [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Successfully updated port: 09723c5d-7ef3-49ea-b024-03afb282b0d5 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2010.147808] env[63379]: DEBUG oslo_vmware.api [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780437, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.648559} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2010.148825] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 0b06665f-befc-4fa3-9eef-2c2f74ba382f/0b06665f-befc-4fa3-9eef-2c2f74ba382f.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2010.149077] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2010.149595] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ad1b6808-b8bc-46cf-b906-7cb670611e52 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.156901] env[63379]: DEBUG oslo_vmware.api [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 2010.156901] env[63379]: value = "task-1780439" [ 2010.156901] env[63379]: _type = "Task" [ 2010.156901] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2010.168779] env[63379]: DEBUG oslo_vmware.api [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780439, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2010.271725] env[63379]: DEBUG nova.scheduler.client.report [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2010.571673] env[63379]: DEBUG oslo_vmware.api [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780438, 'name': PowerOnVM_Task, 'duration_secs': 0.410218} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2010.572029] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2010.572236] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-673e76ae-1851-4844-b52d-19ea7ee45e0e tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Updating instance '8b9f070e-11d3-4e2d-a0ce-54bb939a36ff' progress to 100 {{(pid=63379) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2010.637056] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "refresh_cache-20649b93-78ac-4805-aa24-5dbfef9d766b" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2010.637218] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquired lock "refresh_cache-20649b93-78ac-4805-aa24-5dbfef9d766b" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2010.637357] env[63379]: DEBUG nova.network.neutron [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2010.666864] env[63379]: DEBUG oslo_vmware.api [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780439, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063812} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2010.667163] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2010.667952] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8003b1d6-126e-4b25-9cd2-c876756c62c2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.690430] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Reconfiguring VM instance instance-00000073 to attach disk [datastore1] 0b06665f-befc-4fa3-9eef-2c2f74ba382f/0b06665f-befc-4fa3-9eef-2c2f74ba382f.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2010.690943] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-67250a87-abde-430a-aaa5-38390bd1157a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.711758] env[63379]: DEBUG oslo_vmware.api [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 2010.711758] env[63379]: value = "task-1780440" [ 2010.711758] env[63379]: _type = "Task" [ 2010.711758] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2010.720548] env[63379]: DEBUG oslo_vmware.api [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780440, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2010.777203] env[63379]: DEBUG oslo_concurrency.lockutils [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.266s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2010.807776] env[63379]: INFO nova.network.neutron [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Updating port 7dc69df5-b3d2-494c-b700-584c31779f9a with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 2011.167828] env[63379]: DEBUG nova.network.neutron [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Instance cache missing network info. 
{{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2011.192031] env[63379]: DEBUG nova.network.neutron [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Port 43a4d9a7-51c7-4dbd-8864-2e6fbcb7c13e binding to destination host cpu-1 is already ACTIVE {{(pid=63379) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2011.192291] env[63379]: DEBUG oslo_concurrency.lockutils [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "refresh_cache-df8d513d-c201-4ffe-894e-cf8c3318cecc" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2011.192450] env[63379]: DEBUG oslo_concurrency.lockutils [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquired lock "refresh_cache-df8d513d-c201-4ffe-894e-cf8c3318cecc" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2011.192621] env[63379]: DEBUG nova.network.neutron [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2011.222461] env[63379]: DEBUG oslo_vmware.api [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780440, 'name': ReconfigVM_Task, 'duration_secs': 0.25693} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2011.222799] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Reconfigured VM instance instance-00000073 to attach disk [datastore1] 0b06665f-befc-4fa3-9eef-2c2f74ba382f/0b06665f-befc-4fa3-9eef-2c2f74ba382f.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2011.223559] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-02aa0c68-fc30-46f9-941e-c7a83b6cfb4e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.230868] env[63379]: DEBUG oslo_vmware.api [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 2011.230868] env[63379]: value = "task-1780441" [ 2011.230868] env[63379]: _type = "Task" [ 2011.230868] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2011.244577] env[63379]: DEBUG oslo_vmware.api [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780441, 'name': Rename_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2011.307204] env[63379]: DEBUG nova.network.neutron [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Updating instance_info_cache with network_info: [{"id": "09723c5d-7ef3-49ea-b024-03afb282b0d5", "address": "fa:16:3e:5c:2e:74", "network": {"id": "13b14fc1-6384-47ab-b623-f48d1ef0c41e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1646386679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9746ae945355479fa5880802e08d2b0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16c6ea68-9b0e-4ac0-a484-7a9a40533017", "external-id": "nsx-vlan-transportzone-384", "segmentation_id": 384, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09723c5d-7e", "ovs_interfaceid": "09723c5d-7ef3-49ea-b024-03afb282b0d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2011.741223] env[63379]: DEBUG oslo_vmware.api [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780441, 'name': Rename_Task, 'duration_secs': 0.143821} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2011.741561] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2011.741764] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f087e7c3-6cf6-4d15-bfd9-768228d63317 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.749954] env[63379]: DEBUG oslo_vmware.api [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 2011.749954] env[63379]: value = "task-1780442" [ 2011.749954] env[63379]: _type = "Task" [ 2011.749954] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2011.759116] env[63379]: DEBUG oslo_vmware.api [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780442, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2011.810495] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Releasing lock "refresh_cache-20649b93-78ac-4805-aa24-5dbfef9d766b" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2011.810984] env[63379]: DEBUG nova.compute.manager [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Instance network_info: |[{"id": "09723c5d-7ef3-49ea-b024-03afb282b0d5", "address": "fa:16:3e:5c:2e:74", "network": {"id": "13b14fc1-6384-47ab-b623-f48d1ef0c41e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1646386679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9746ae945355479fa5880802e08d2b0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16c6ea68-9b0e-4ac0-a484-7a9a40533017", "external-id": "nsx-vlan-transportzone-384", "segmentation_id": 384, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09723c5d-7e", "ovs_interfaceid": "09723c5d-7ef3-49ea-b024-03afb282b0d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2011.811452] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5c:2e:74', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '16c6ea68-9b0e-4ac0-a484-7a9a40533017', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '09723c5d-7ef3-49ea-b024-03afb282b0d5', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2011.819398] env[63379]: DEBUG oslo.service.loopingcall [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2011.819653] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2011.819839] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fdb44e32-d358-4481-9ab0-6fd0e9f987f1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.843070] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2011.843070] env[63379]: value = "task-1780443" [ 2011.843070] env[63379]: _type = "Task" [ 2011.843070] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2011.851406] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780443, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2011.959021] env[63379]: DEBUG nova.network.neutron [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Updating instance_info_cache with network_info: [{"id": "43a4d9a7-51c7-4dbd-8864-2e6fbcb7c13e", "address": "fa:16:3e:2d:9c:d3", "network": {"id": "0dd98be0-5b25-4e45-ac38-4b8d3cd9fc6c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-191573180-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "645f0e0a5e1a44d59ca9c85da49bb454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43a4d9a7-51", "ovs_interfaceid": "43a4d9a7-51c7-4dbd-8864-2e6fbcb7c13e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2012.087728] env[63379]: DEBUG nova.compute.manager [req-39226f44-6175-4b76-9462-9f770c86f7f1 req-a57547b8-13c8-4c26-9e63-8cab4dc6229e service nova] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Received event network-changed-09723c5d-7ef3-49ea-b024-03afb282b0d5 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2012.087728] env[63379]: DEBUG nova.compute.manager [req-39226f44-6175-4b76-9462-9f770c86f7f1 req-a57547b8-13c8-4c26-9e63-8cab4dc6229e service nova] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Refreshing instance network info cache due to event network-changed-09723c5d-7ef3-49ea-b024-03afb282b0d5. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 2012.087988] env[63379]: DEBUG oslo_concurrency.lockutils [req-39226f44-6175-4b76-9462-9f770c86f7f1 req-a57547b8-13c8-4c26-9e63-8cab4dc6229e service nova] Acquiring lock "refresh_cache-20649b93-78ac-4805-aa24-5dbfef9d766b" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2012.088067] env[63379]: DEBUG oslo_concurrency.lockutils [req-39226f44-6175-4b76-9462-9f770c86f7f1 req-a57547b8-13c8-4c26-9e63-8cab4dc6229e service nova] Acquired lock "refresh_cache-20649b93-78ac-4805-aa24-5dbfef9d766b" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2012.088397] env[63379]: DEBUG nova.network.neutron [req-39226f44-6175-4b76-9462-9f770c86f7f1 req-a57547b8-13c8-4c26-9e63-8cab4dc6229e service nova] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Refreshing network info cache for port 09723c5d-7ef3-49ea-b024-03afb282b0d5 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2012.262940] env[63379]: DEBUG oslo_vmware.api [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780442, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2012.333574] env[63379]: DEBUG oslo_concurrency.lockutils [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Acquiring lock "refresh_cache-10fc842d-b821-4103-b6a5-f5b2fc46ea74" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2012.334475] env[63379]: DEBUG oslo_concurrency.lockutils [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Acquired lock "refresh_cache-10fc842d-b821-4103-b6a5-f5b2fc46ea74" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2012.334475] env[63379]: DEBUG nova.network.neutron [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2012.354052] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780443, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2012.462121] env[63379]: DEBUG oslo_concurrency.lockutils [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Releasing lock "refresh_cache-df8d513d-c201-4ffe-894e-cf8c3318cecc" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2012.635380] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d199a1a0-ff33-479b-b744-b43987cb249c tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "8b9f070e-11d3-4e2d-a0ce-54bb939a36ff" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2012.635698] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d199a1a0-ff33-479b-b744-b43987cb249c tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "8b9f070e-11d3-4e2d-a0ce-54bb939a36ff" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2012.635894] env[63379]: DEBUG nova.compute.manager [None req-d199a1a0-ff33-479b-b744-b43987cb249c tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Going to confirm migration 10 {{(pid=63379) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 2012.765772] env[63379]: DEBUG oslo_vmware.api [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780442, 'name': PowerOnVM_Task, 'duration_secs': 0.545423} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2012.766101] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2012.766352] env[63379]: INFO nova.compute.manager [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Took 8.53 seconds to spawn the instance on the hypervisor. 
[ 2012.766625] env[63379]: DEBUG nova.compute.manager [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2012.767570] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71a012d0-d868-486e-a070-daa8483296bd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.835951] env[63379]: DEBUG nova.network.neutron [req-39226f44-6175-4b76-9462-9f770c86f7f1 req-a57547b8-13c8-4c26-9e63-8cab4dc6229e service nova] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Updated VIF entry in instance network info cache for port 09723c5d-7ef3-49ea-b024-03afb282b0d5. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2012.836690] env[63379]: DEBUG nova.network.neutron [req-39226f44-6175-4b76-9462-9f770c86f7f1 req-a57547b8-13c8-4c26-9e63-8cab4dc6229e service nova] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Updating instance_info_cache with network_info: [{"id": "09723c5d-7ef3-49ea-b024-03afb282b0d5", "address": "fa:16:3e:5c:2e:74", "network": {"id": "13b14fc1-6384-47ab-b623-f48d1ef0c41e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1646386679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9746ae945355479fa5880802e08d2b0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16c6ea68-9b0e-4ac0-a484-7a9a40533017", "external-id": "nsx-vlan-transportzone-384", "segmentation_id": 384, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09723c5d-7e", "ovs_interfaceid": "09723c5d-7ef3-49ea-b024-03afb282b0d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2012.854146] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780443, 'name': CreateVM_Task, 'duration_secs': 0.602827} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2012.854384] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2012.855036] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2012.855233] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2012.855597] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2012.855856] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ea76b5d-f72b-428c-b517-77f2bc4f0547 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.862587] env[63379]: DEBUG oslo_vmware.api [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2012.862587] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52cfaee4-5cf0-bd9f-5667-11352c03dfd8" [ 2012.862587] env[63379]: _type = "Task" [ 2012.862587] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2012.873835] env[63379]: DEBUG oslo_vmware.api [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52cfaee4-5cf0-bd9f-5667-11352c03dfd8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2012.965618] env[63379]: DEBUG nova.compute.manager [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=63379) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:898}} [ 2013.076283] env[63379]: DEBUG nova.network.neutron [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Updating instance_info_cache with network_info: [{"id": "7dc69df5-b3d2-494c-b700-584c31779f9a", "address": "fa:16:3e:b3:a2:43", "network": {"id": "678e0600-c1d0-4fb0-8219-a81a1ca0f4f0", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1244921057-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28f7e38c300546a2a7a033cb12c7f89a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1895250-76cc-41f7-b7f8-2e5679494607", "external-id": "nsx-vlan-transportzone-785", "segmentation_id": 785, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dc69df5-b3", "ovs_interfaceid": "7dc69df5-b3d2-494c-b700-584c31779f9a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2013.205824] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d199a1a0-ff33-479b-b744-b43987cb249c tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "refresh_cache-8b9f070e-11d3-4e2d-a0ce-54bb939a36ff" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2013.206036] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d199a1a0-ff33-479b-b744-b43987cb249c tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquired lock "refresh_cache-8b9f070e-11d3-4e2d-a0ce-54bb939a36ff" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2013.206237] env[63379]: DEBUG nova.network.neutron [None req-d199a1a0-ff33-479b-b744-b43987cb249c tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2013.206500] env[63379]: DEBUG nova.objects.instance [None req-d199a1a0-ff33-479b-b744-b43987cb249c tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lazy-loading 'info_cache' on Instance uuid 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff {{(pid=63379) 
obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2013.291548] env[63379]: INFO nova.compute.manager [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Took 13.34 seconds to build instance. [ 2013.340626] env[63379]: DEBUG oslo_concurrency.lockutils [req-39226f44-6175-4b76-9462-9f770c86f7f1 req-a57547b8-13c8-4c26-9e63-8cab4dc6229e service nova] Releasing lock "refresh_cache-20649b93-78ac-4805-aa24-5dbfef9d766b" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2013.372963] env[63379]: DEBUG oslo_vmware.api [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52cfaee4-5cf0-bd9f-5667-11352c03dfd8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2013.579369] env[63379]: DEBUG oslo_concurrency.lockutils [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Releasing lock "refresh_cache-10fc842d-b821-4103-b6a5-f5b2fc46ea74" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2013.607234] env[63379]: DEBUG nova.virt.hardware [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='0925f80f0ee66d01170f171365361202',container_format='bare',created_at=2024-12-11T23:38:43Z,direct_url=,disk_format='vmdk',id=3959d3db-eff7-402f-81f5-8f67a00a1f20,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-1611582952-shelved',owner='28f7e38c300546a2a7a033cb12c7f89a',properties=ImageMetaProps,protected=,size=31660032,status='active',tags=,updated_at=2024-12-11T23:38:57Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2013.607499] env[63379]: DEBUG nova.virt.hardware [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2013.607661] env[63379]: DEBUG nova.virt.hardware [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2013.607848] env[63379]: DEBUG nova.virt.hardware [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2013.607999] env[63379]: DEBUG 
nova.virt.hardware [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2013.608168] env[63379]: DEBUG nova.virt.hardware [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2013.608420] env[63379]: DEBUG nova.virt.hardware [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2013.608592] env[63379]: DEBUG nova.virt.hardware [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2013.608763] env[63379]: DEBUG nova.virt.hardware [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2013.608930] env[63379]: DEBUG nova.virt.hardware [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2013.609120] env[63379]: DEBUG nova.virt.hardware [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2013.610228] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d70eb7d9-eefd-4bef-ac3d-535f2e7c943d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.619389] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-977657ea-cd16-4612-889d-4f7f5701bde1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.633126] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b3:a2:43', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a1895250-76cc-41f7-b7f8-2e5679494607', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7dc69df5-b3d2-494c-b700-584c31779f9a', 'vif_model': 'vmxnet3'}] 
{{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2013.640409] env[63379]: DEBUG oslo.service.loopingcall [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2013.640897] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2013.641128] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-becd6320-e124-460c-9398-b1512d8abe36 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.661503] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2013.661503] env[63379]: value = "task-1780444" [ 2013.661503] env[63379]: _type = "Task" [ 2013.661503] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2013.669182] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780444, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2013.793576] env[63379]: DEBUG oslo_concurrency.lockutils [None req-8c87d46d-5ce6-4d13-b62e-8ee92cfdded4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "0b06665f-befc-4fa3-9eef-2c2f74ba382f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.855s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2013.873553] env[63379]: DEBUG oslo_vmware.api [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52cfaee4-5cf0-bd9f-5667-11352c03dfd8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2014.087532] env[63379]: DEBUG oslo_concurrency.lockutils [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2014.087532] env[63379]: DEBUG oslo_concurrency.lockutils [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2014.114064] env[63379]: DEBUG nova.compute.manager [req-30c994ea-005f-43f1-add8-79ffc55b952c req-5d3ac998-b5d1-4d08-a8fa-2e85d12d1b27 service nova] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Received event network-vif-plugged-7dc69df5-b3d2-494c-b700-584c31779f9a {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2014.114343] env[63379]: DEBUG oslo_concurrency.lockutils [req-30c994ea-005f-43f1-add8-79ffc55b952c req-5d3ac998-b5d1-4d08-a8fa-2e85d12d1b27 service nova] Acquiring lock "10fc842d-b821-4103-b6a5-f5b2fc46ea74-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2014.114555] env[63379]: DEBUG oslo_concurrency.lockutils [req-30c994ea-005f-43f1-add8-79ffc55b952c req-5d3ac998-b5d1-4d08-a8fa-2e85d12d1b27 service nova] Lock "10fc842d-b821-4103-b6a5-f5b2fc46ea74-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2014.114731] env[63379]: DEBUG oslo_concurrency.lockutils [req-30c994ea-005f-43f1-add8-79ffc55b952c req-5d3ac998-b5d1-4d08-a8fa-2e85d12d1b27 service nova] Lock "10fc842d-b821-4103-b6a5-f5b2fc46ea74-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2014.114931] env[63379]: DEBUG nova.compute.manager [req-30c994ea-005f-43f1-add8-79ffc55b952c req-5d3ac998-b5d1-4d08-a8fa-2e85d12d1b27 service nova] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] No waiting events found dispatching network-vif-plugged-7dc69df5-b3d2-494c-b700-584c31779f9a {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2014.115101] env[63379]: WARNING nova.compute.manager [req-30c994ea-005f-43f1-add8-79ffc55b952c req-5d3ac998-b5d1-4d08-a8fa-2e85d12d1b27 service nova] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Received unexpected event network-vif-plugged-7dc69df5-b3d2-494c-b700-584c31779f9a for instance with vm_state shelved_offloaded and task_state spawning. 
[ 2014.115274] env[63379]: DEBUG nova.compute.manager [req-30c994ea-005f-43f1-add8-79ffc55b952c req-5d3ac998-b5d1-4d08-a8fa-2e85d12d1b27 service nova] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Received event network-changed-7dc69df5-b3d2-494c-b700-584c31779f9a {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2014.115468] env[63379]: DEBUG nova.compute.manager [req-30c994ea-005f-43f1-add8-79ffc55b952c req-5d3ac998-b5d1-4d08-a8fa-2e85d12d1b27 service nova] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Refreshing instance network info cache due to event network-changed-7dc69df5-b3d2-494c-b700-584c31779f9a. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 2014.115722] env[63379]: DEBUG oslo_concurrency.lockutils [req-30c994ea-005f-43f1-add8-79ffc55b952c req-5d3ac998-b5d1-4d08-a8fa-2e85d12d1b27 service nova] Acquiring lock "refresh_cache-10fc842d-b821-4103-b6a5-f5b2fc46ea74" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2014.115826] env[63379]: DEBUG oslo_concurrency.lockutils [req-30c994ea-005f-43f1-add8-79ffc55b952c req-5d3ac998-b5d1-4d08-a8fa-2e85d12d1b27 service nova] Acquired lock "refresh_cache-10fc842d-b821-4103-b6a5-f5b2fc46ea74" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2014.116034] env[63379]: DEBUG nova.network.neutron [req-30c994ea-005f-43f1-add8-79ffc55b952c req-5d3ac998-b5d1-4d08-a8fa-2e85d12d1b27 service nova] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Refreshing network info cache for port 7dc69df5-b3d2-494c-b700-584c31779f9a {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2014.173442] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780444, 'name': CreateVM_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2014.376196] env[63379]: DEBUG oslo_vmware.api [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52cfaee4-5cf0-bd9f-5667-11352c03dfd8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2014.483269] env[63379]: DEBUG nova.network.neutron [None req-d199a1a0-ff33-479b-b744-b43987cb249c tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Updating instance_info_cache with network_info: [{"id": "05160396-15ed-49fa-b2de-3793f1f45863", "address": "fa:16:3e:b8:60:62", "network": {"id": "867cf8d8-4bba-4306-ad6d-632c9dc6863d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-777715300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.247", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a3363a90de2d4d5988ddd03974c10d0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "459b8c74-0aa6-42b6-996a-42b1c5d7e5c6", "external-id": "nsx-vlan-transportzone-467", "segmentation_id": 467, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05160396-15", "ovs_interfaceid": "05160396-15ed-49fa-b2de-3793f1f45863", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2014.593872] env[63379]: DEBUG nova.objects.instance [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lazy-loading 'migration_context' on Instance uuid df8d513d-c201-4ffe-894e-cf8c3318cecc {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2014.656852] env[63379]: DEBUG nova.compute.manager [req-98f33701-95de-4bd1-a8f2-9762166e51d0 req-2e3e4fa7-4141-41ca-81c9-289183d3a765 service nova] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Received event network-changed-61782886-48c0-44e0-a33b-122b4323cfe0 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2014.657109] env[63379]: DEBUG nova.compute.manager [req-98f33701-95de-4bd1-a8f2-9762166e51d0 req-2e3e4fa7-4141-41ca-81c9-289183d3a765 service nova] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Refreshing instance network info cache due to event network-changed-61782886-48c0-44e0-a33b-122b4323cfe0. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 2014.657355] env[63379]: DEBUG oslo_concurrency.lockutils [req-98f33701-95de-4bd1-a8f2-9762166e51d0 req-2e3e4fa7-4141-41ca-81c9-289183d3a765 service nova] Acquiring lock "refresh_cache-0b06665f-befc-4fa3-9eef-2c2f74ba382f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2014.657523] env[63379]: DEBUG oslo_concurrency.lockutils [req-98f33701-95de-4bd1-a8f2-9762166e51d0 req-2e3e4fa7-4141-41ca-81c9-289183d3a765 service nova] Acquired lock "refresh_cache-0b06665f-befc-4fa3-9eef-2c2f74ba382f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2014.657786] env[63379]: DEBUG nova.network.neutron [req-98f33701-95de-4bd1-a8f2-9762166e51d0 req-2e3e4fa7-4141-41ca-81c9-289183d3a765 service nova] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Refreshing network info cache for port 61782886-48c0-44e0-a33b-122b4323cfe0 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2014.672442] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780444, 'name': CreateVM_Task, 'duration_secs': 0.618962} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2014.672613] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2014.673265] env[63379]: DEBUG oslo_concurrency.lockutils [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3959d3db-eff7-402f-81f5-8f67a00a1f20" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2014.673458] env[63379]: DEBUG oslo_concurrency.lockutils [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3959d3db-eff7-402f-81f5-8f67a00a1f20" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2014.673821] env[63379]: DEBUG oslo_concurrency.lockutils [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3959d3db-eff7-402f-81f5-8f67a00a1f20" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2014.674083] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-460e44c0-e631-4160-9094-0675904ae19a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.679579] env[63379]: DEBUG oslo_vmware.api [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Waiting for the task: (returnval){ [ 2014.679579] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]523a31d2-1227-1f5f-174f-376901159eb8" [ 2014.679579] env[63379]: _type = "Task" [ 2014.679579] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2014.688213] env[63379]: DEBUG oslo_vmware.api [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]523a31d2-1227-1f5f-174f-376901159eb8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2014.821411] env[63379]: DEBUG nova.network.neutron [req-30c994ea-005f-43f1-add8-79ffc55b952c req-5d3ac998-b5d1-4d08-a8fa-2e85d12d1b27 service nova] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Updated VIF entry in instance network info cache for port 7dc69df5-b3d2-494c-b700-584c31779f9a. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2014.821870] env[63379]: DEBUG nova.network.neutron [req-30c994ea-005f-43f1-add8-79ffc55b952c req-5d3ac998-b5d1-4d08-a8fa-2e85d12d1b27 service nova] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Updating instance_info_cache with network_info: [{"id": "7dc69df5-b3d2-494c-b700-584c31779f9a", "address": "fa:16:3e:b3:a2:43", "network": {"id": "678e0600-c1d0-4fb0-8219-a81a1ca0f4f0", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1244921057-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28f7e38c300546a2a7a033cb12c7f89a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1895250-76cc-41f7-b7f8-2e5679494607", "external-id": "nsx-vlan-transportzone-785", "segmentation_id": 785, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dc69df5-b3", "ovs_interfaceid": "7dc69df5-b3d2-494c-b700-584c31779f9a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2014.874650] env[63379]: DEBUG oslo_vmware.api [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52cfaee4-5cf0-bd9f-5667-11352c03dfd8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2014.986545] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d199a1a0-ff33-479b-b744-b43987cb249c tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Releasing lock "refresh_cache-8b9f070e-11d3-4e2d-a0ce-54bb939a36ff" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2014.986836] env[63379]: DEBUG nova.objects.instance [None req-d199a1a0-ff33-479b-b744-b43987cb249c tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lazy-loading 'migration_context' on Instance uuid 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2015.191377] env[63379]: DEBUG oslo_vmware.api [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]523a31d2-1227-1f5f-174f-376901159eb8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2015.279273] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a846121-77bc-4489-8730-3b68118e44fe {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.290011] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f2f67a4-0321-4d3c-82ff-88a72219931c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.325820] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a503a5d-25ad-4808-a105-3d277f95a14f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.328711] env[63379]: DEBUG oslo_concurrency.lockutils [req-30c994ea-005f-43f1-add8-79ffc55b952c req-5d3ac998-b5d1-4d08-a8fa-2e85d12d1b27 service nova] Releasing lock "refresh_cache-10fc842d-b821-4103-b6a5-f5b2fc46ea74" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2015.334806] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-838844ec-d8c5-4f86-ad35-5f895a60930a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.349217] env[63379]: DEBUG nova.compute.provider_tree [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2015.376176] env[63379]: DEBUG oslo_vmware.api [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 
tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52cfaee4-5cf0-bd9f-5667-11352c03dfd8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2015.430099] env[63379]: DEBUG nova.network.neutron [req-98f33701-95de-4bd1-a8f2-9762166e51d0 req-2e3e4fa7-4141-41ca-81c9-289183d3a765 service nova] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Updated VIF entry in instance network info cache for port 61782886-48c0-44e0-a33b-122b4323cfe0. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2015.430518] env[63379]: DEBUG nova.network.neutron [req-98f33701-95de-4bd1-a8f2-9762166e51d0 req-2e3e4fa7-4141-41ca-81c9-289183d3a765 service nova] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Updating instance_info_cache with network_info: [{"id": "61782886-48c0-44e0-a33b-122b4323cfe0", "address": "fa:16:3e:f1:66:51", "network": {"id": "c67e6fb1-ba3e-4494-b459-ecd555f3bf64", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1864563188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.212", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c01c5c8c3734c4ea066324e542e7374", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6934071-bf85-4591-9c7d-55c7ea131262", "external-id": "nsx-vlan-transportzone-452", "segmentation_id": 452, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61782886-48", "ovs_interfaceid": "61782886-48c0-44e0-a33b-122b4323cfe0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2015.490633] env[63379]: DEBUG nova.objects.base [None req-d199a1a0-ff33-479b-b744-b43987cb249c tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Object Instance<8b9f070e-11d3-4e2d-a0ce-54bb939a36ff> lazy-loaded attributes: info_cache,migration_context {{(pid=63379) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2015.491587] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-735e7d80-0f94-4b20-945e-d292e8dfc827 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.511988] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e00b396-d8bb-464a-b6d3-25c54693ab7f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.517558] env[63379]: DEBUG oslo_vmware.api [None req-d199a1a0-ff33-479b-b744-b43987cb249c tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 2015.517558] env[63379]: value = 
"session[526a9413-5212-9a2d-b527-6a96915ebc5a]52462aae-4612-7bd5-45ce-33b68b894fad" [ 2015.517558] env[63379]: _type = "Task" [ 2015.517558] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2015.525911] env[63379]: DEBUG oslo_vmware.api [None req-d199a1a0-ff33-479b-b744-b43987cb249c tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52462aae-4612-7bd5-45ce-33b68b894fad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2015.691590] env[63379]: DEBUG oslo_vmware.api [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]523a31d2-1227-1f5f-174f-376901159eb8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2015.877461] env[63379]: DEBUG oslo_vmware.api [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52cfaee4-5cf0-bd9f-5667-11352c03dfd8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2015.933535] env[63379]: DEBUG oslo_concurrency.lockutils [req-98f33701-95de-4bd1-a8f2-9762166e51d0 req-2e3e4fa7-4141-41ca-81c9-289183d3a765 service nova] Releasing lock "refresh_cache-0b06665f-befc-4fa3-9eef-2c2f74ba382f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2015.946953] env[63379]: DEBUG nova.scheduler.client.report [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Updated inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 with generation 171 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2015.947249] env[63379]: DEBUG nova.compute.provider_tree [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Updating resource provider cf478c89-515f-4372-b90f-4868ab56e978 generation from 171 to 172 during operation: update_inventory {{(pid=63379) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2015.947444] env[63379]: DEBUG nova.compute.provider_tree [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 
'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2015.963224] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2015.963428] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager.update_available_resource {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2016.027129] env[63379]: DEBUG oslo_vmware.api [None req-d199a1a0-ff33-479b-b744-b43987cb249c tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52462aae-4612-7bd5-45ce-33b68b894fad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2016.192491] env[63379]: DEBUG oslo_vmware.api [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]523a31d2-1227-1f5f-174f-376901159eb8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2016.249714] env[63379]: DEBUG oslo_concurrency.lockutils [None req-31ff0f0e-4ca0-48a0-9e11-74e82e2cf322 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquiring lock "a39c5511-3efc-41e9-8902-692f237557e1" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2016.249976] env[63379]: DEBUG oslo_concurrency.lockutils [None req-31ff0f0e-4ca0-48a0-9e11-74e82e2cf322 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "a39c5511-3efc-41e9-8902-692f237557e1" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2016.377340] env[63379]: DEBUG oslo_vmware.api [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52cfaee4-5cf0-bd9f-5667-11352c03dfd8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2016.466492] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2016.528882] env[63379]: DEBUG oslo_vmware.api [None req-d199a1a0-ff33-479b-b744-b43987cb249c tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52462aae-4612-7bd5-45ce-33b68b894fad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2016.692753] env[63379]: DEBUG oslo_vmware.api [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]523a31d2-1227-1f5f-174f-376901159eb8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2016.753419] env[63379]: DEBUG nova.compute.utils [None req-31ff0f0e-4ca0-48a0-9e11-74e82e2cf322 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2016.877074] env[63379]: DEBUG oslo_vmware.api [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52cfaee4-5cf0-bd9f-5667-11352c03dfd8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2016.958512] env[63379]: DEBUG oslo_concurrency.lockutils [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.871s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2016.964394] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.498s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2016.964526] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2016.964595] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63379) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2016.967210] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4f78a13-53be-4a67-91d6-2c4014795892 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.980052] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5cb8ec3-80bb-4989-a546-c1f10252732a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.997412] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78fba4de-1317-4e91-bc7e-bfb72aad383f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.004250] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20944e1b-357d-49cb-b78d-8818f4c0c6ee {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.035068] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180503MB free_disk=163GB free_vcpus=48 pci_devices=None {{(pid=63379) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2017.035240] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2017.035444] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2017.045030] env[63379]: DEBUG oslo_vmware.api [None req-d199a1a0-ff33-479b-b744-b43987cb249c tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52462aae-4612-7bd5-45ce-33b68b894fad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2017.193580] env[63379]: DEBUG oslo_vmware.api [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]523a31d2-1227-1f5f-174f-376901159eb8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2017.256475] env[63379]: DEBUG oslo_concurrency.lockutils [None req-31ff0f0e-4ca0-48a0-9e11-74e82e2cf322 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "a39c5511-3efc-41e9-8902-692f237557e1" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2017.378554] env[63379]: DEBUG oslo_vmware.api [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52cfaee4-5cf0-bd9f-5667-11352c03dfd8, 'name': SearchDatastore_Task, 'duration_secs': 4.358321} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2017.378860] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2017.379111] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2017.379363] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2017.379512] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2017.379692] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2017.379950] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7864e8fd-7446-45e9-8538-2d0551540806 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.388416] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2017.388610] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2017.389436] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-816f9495-5787-4e50-952d-8271c40d6aa3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.395244] env[63379]: DEBUG oslo_vmware.api [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2017.395244] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]522decce-a0f4-57c1-3e9d-0fa22e9fe1e6" [ 2017.395244] env[63379]: _type = "Task" [ 2017.395244] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2017.403308] env[63379]: DEBUG oslo_vmware.api [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]522decce-a0f4-57c1-3e9d-0fa22e9fe1e6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2017.549369] env[63379]: DEBUG oslo_vmware.api [None req-d199a1a0-ff33-479b-b744-b43987cb249c tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52462aae-4612-7bd5-45ce-33b68b894fad, 'name': SearchDatastore_Task, 'duration_secs': 1.705512} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2017.549882] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d199a1a0-ff33-479b-b744-b43987cb249c tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2017.701571] env[63379]: DEBUG oslo_concurrency.lockutils [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3959d3db-eff7-402f-81f5-8f67a00a1f20" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2017.701994] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Processing image 3959d3db-eff7-402f-81f5-8f67a00a1f20 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2017.702399] env[63379]: DEBUG oslo_concurrency.lockutils [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3959d3db-eff7-402f-81f5-8f67a00a1f20/3959d3db-eff7-402f-81f5-8f67a00a1f20.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2017.702668] env[63379]: DEBUG oslo_concurrency.lockutils [None 
req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3959d3db-eff7-402f-81f5-8f67a00a1f20/3959d3db-eff7-402f-81f5-8f67a00a1f20.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2017.702975] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2017.703372] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1e646865-f266-4ff9-a8b3-ad81b3a286f7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.714625] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2017.714952] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2017.716131] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff6e2f91-01f3-4cf5-9cb1-db22e1558227 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.724167] env[63379]: DEBUG oslo_vmware.api [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Waiting for the task: (returnval){ [ 2017.724167] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52951177-4489-ab63-e536-f679db129ede" [ 2017.724167] env[63379]: _type = "Task" [ 2017.724167] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2017.736453] env[63379]: DEBUG oslo_vmware.api [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52951177-4489-ab63-e536-f679db129ede, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2017.906199] env[63379]: DEBUG oslo_vmware.api [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]522decce-a0f4-57c1-3e9d-0fa22e9fe1e6, 'name': SearchDatastore_Task, 'duration_secs': 0.01961} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2017.907067] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fbc98cf4-d963-4e84-bb2a-fe04ef045357 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.914080] env[63379]: DEBUG oslo_vmware.api [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2017.914080] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]522d105e-0a77-bba0-08c3-12e7641c1040" [ 2017.914080] env[63379]: _type = "Task" [ 2017.914080] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2017.920931] env[63379]: DEBUG oslo_vmware.api [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]522d105e-0a77-bba0-08c3-12e7641c1040, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2018.050872] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Applying migration context for instance 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff as it has an incoming, in-progress migration 4814e176-a955-41b5-bf25-9bfbcbc945cb. Migration status is confirming {{(pid=63379) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 2018.052208] env[63379]: INFO nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Updating resource usage from migration 4814e176-a955-41b5-bf25-9bfbcbc945cb [ 2018.071752] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 510db409-0b4c-494a-8084-39ef3cd6c918 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2018.071918] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance e1681d89-2f55-47b7-9962-55aa169b3d0a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2018.072057] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance a39c5511-3efc-41e9-8902-692f237557e1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2018.072180] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 85ecb409-ab53-43d9-8120-2f8c7402d74c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2018.237210] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Preparing fetch location {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2018.237497] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Fetch image to [datastore1] OSTACK_IMG_f25df25d-1b17-4f88-92fe-6a7110495e8e/OSTACK_IMG_f25df25d-1b17-4f88-92fe-6a7110495e8e.vmdk {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2018.237705] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Downloading stream optimized image 3959d3db-eff7-402f-81f5-8f67a00a1f20 to [datastore1] OSTACK_IMG_f25df25d-1b17-4f88-92fe-6a7110495e8e/OSTACK_IMG_f25df25d-1b17-4f88-92fe-6a7110495e8e.vmdk on the data store datastore1 as vApp {{(pid=63379) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 2018.237857] env[63379]: DEBUG nova.virt.vmwareapi.images [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Downloading image file data 3959d3db-eff7-402f-81f5-8f67a00a1f20 to the ESX as VM named 'OSTACK_IMG_f25df25d-1b17-4f88-92fe-6a7110495e8e' {{(pid=63379) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 2018.308773] env[63379]: DEBUG oslo_vmware.rw_handles [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 2018.308773] env[63379]: value = "resgroup-9" [ 2018.308773] env[63379]: _type = "ResourcePool" [ 2018.308773] env[63379]: }. 
{{(pid=63379) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 2018.309086] env[63379]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-a1dacb89-4f11-4f14-9e80-1866ab0f8bc6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.324477] env[63379]: DEBUG oslo_concurrency.lockutils [None req-31ff0f0e-4ca0-48a0-9e11-74e82e2cf322 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquiring lock "a39c5511-3efc-41e9-8902-692f237557e1" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2018.324477] env[63379]: DEBUG oslo_concurrency.lockutils [None req-31ff0f0e-4ca0-48a0-9e11-74e82e2cf322 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "a39c5511-3efc-41e9-8902-692f237557e1" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2018.324659] env[63379]: INFO nova.compute.manager [None req-31ff0f0e-4ca0-48a0-9e11-74e82e2cf322 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Attaching volume bda791f5-f0d0-4e7a-9d83-041ad83b18a7 to /dev/sdb [ 2018.333430] env[63379]: DEBUG oslo_vmware.rw_handles [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Lease: (returnval){ [ 2018.333430] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e4a830-33a9-9ee6-3a60-8adb6943a34d" [ 2018.333430] env[63379]: _type = "HttpNfcLease" [ 2018.333430] env[63379]: } obtained for vApp import into resource pool (val){ [ 2018.333430] env[63379]: value = "resgroup-9" [ 2018.333430] env[63379]: _type = "ResourcePool" [ 2018.333430] env[63379]: }. {{(pid=63379) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 2018.333709] env[63379]: DEBUG oslo_vmware.api [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Waiting for the lease: (returnval){ [ 2018.333709] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e4a830-33a9-9ee6-3a60-8adb6943a34d" [ 2018.333709] env[63379]: _type = "HttpNfcLease" [ 2018.333709] env[63379]: } to be ready. {{(pid=63379) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2018.343426] env[63379]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2018.343426] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e4a830-33a9-9ee6-3a60-8adb6943a34d" [ 2018.343426] env[63379]: _type = "HttpNfcLease" [ 2018.343426] env[63379]: } is initializing. 
{{(pid=63379) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2018.358319] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67e1f2e4-e783-4209-8331-0d5cabf7c8ec {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.367100] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ded9ceb8-c0c8-4c39-a3b0-718a5141cbcd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.382561] env[63379]: DEBUG nova.virt.block_device [None req-31ff0f0e-4ca0-48a0-9e11-74e82e2cf322 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Updating existing volume attachment record: b20a5daa-964a-4bd1-b46d-2d487ac5dc62 {{(pid=63379) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2018.424075] env[63379]: DEBUG oslo_vmware.api [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]522d105e-0a77-bba0-08c3-12e7641c1040, 'name': SearchDatastore_Task, 'duration_secs': 0.0098} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2018.424573] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2018.424714] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 20649b93-78ac-4805-aa24-5dbfef9d766b/20649b93-78ac-4805-aa24-5dbfef9d766b.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2018.424967] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5c94db0d-aa33-4f82-adb8-86f4fd5ed49a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.432362] env[63379]: DEBUG oslo_vmware.api [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2018.432362] env[63379]: value = "task-1780446" [ 2018.432362] env[63379]: _type = "Task" [ 2018.432362] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2018.441500] env[63379]: DEBUG oslo_vmware.api [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780446, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2018.500909] env[63379]: INFO nova.compute.manager [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Swapping old allocation on dict_keys(['cf478c89-515f-4372-b90f-4868ab56e978']) held by migration 303b138d-23ef-452b-9f59-f1d338dae5c7 for instance [ 2018.525941] env[63379]: DEBUG nova.scheduler.client.report [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Overwriting current allocation {'allocations': {'cf478c89-515f-4372-b90f-4868ab56e978': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 172}}, 'project_id': '645f0e0a5e1a44d59ca9c85da49bb454', 'user_id': '99f3906f7b7e47a1a81c5c8f38d5b4ea', 'consumer_generation': 1} on consumer df8d513d-c201-4ffe-894e-cf8c3318cecc {{(pid=63379) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2032}} [ 2018.575291] env[63379]: INFO nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 303b138d-23ef-452b-9f59-f1d338dae5c7 has allocations against this compute host but is not found in the database. [ 2018.575456] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance df8d513d-c201-4ffe-894e-cf8c3318cecc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2018.575685] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Migration 4814e176-a955-41b5-bf25-9bfbcbc945cb is active on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 2018.575961] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2018.576196] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 0b06665f-befc-4fa3-9eef-2c2f74ba382f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2018.576434] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 20649b93-78ac-4805-aa24-5dbfef9d766b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2018.576579] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 10fc842d-b821-4103-b6a5-f5b2fc46ea74 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2018.576827] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2018.577013] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2560MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2018.614880] env[63379]: DEBUG oslo_concurrency.lockutils [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "refresh_cache-df8d513d-c201-4ffe-894e-cf8c3318cecc" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2018.615227] env[63379]: DEBUG oslo_concurrency.lockutils [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquired lock "refresh_cache-df8d513d-c201-4ffe-894e-cf8c3318cecc" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2018.615459] env[63379]: DEBUG nova.network.neutron [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2018.737206] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e024114-86ab-423b-a717-e9b456cea9bc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.746139] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f8dff35-85ba-44e5-ab75-93c7a7d9a571 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.781884] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07530619-39cc-4518-9a03-e8d33628ca58 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.792297] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76064630-9b79-4f7e-a93b-c963cca7d651 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.808206] env[63379]: DEBUG nova.compute.provider_tree [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not 
changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2018.843934] env[63379]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2018.843934] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e4a830-33a9-9ee6-3a60-8adb6943a34d" [ 2018.843934] env[63379]: _type = "HttpNfcLease" [ 2018.843934] env[63379]: } is initializing. {{(pid=63379) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2018.944223] env[63379]: DEBUG oslo_vmware.api [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780446, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2019.311898] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2019.343093] env[63379]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2019.343093] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e4a830-33a9-9ee6-3a60-8adb6943a34d" [ 2019.343093] env[63379]: _type = "HttpNfcLease" [ 2019.343093] env[63379]: } is initializing. 
{{(pid=63379) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2019.347753] env[63379]: DEBUG nova.network.neutron [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Updating instance_info_cache with network_info: [{"id": "43a4d9a7-51c7-4dbd-8864-2e6fbcb7c13e", "address": "fa:16:3e:2d:9c:d3", "network": {"id": "0dd98be0-5b25-4e45-ac38-4b8d3cd9fc6c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-191573180-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "645f0e0a5e1a44d59ca9c85da49bb454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43a4d9a7-51", "ovs_interfaceid": "43a4d9a7-51c7-4dbd-8864-2e6fbcb7c13e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2019.446391] env[63379]: DEBUG oslo_vmware.api [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780446, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.656525} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2019.446697] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 20649b93-78ac-4805-aa24-5dbfef9d766b/20649b93-78ac-4805-aa24-5dbfef9d766b.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2019.446932] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2019.447205] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-18d4039a-0ce2-4716-ad2f-3065ecd037aa {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.454714] env[63379]: DEBUG oslo_vmware.api [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2019.454714] env[63379]: value = "task-1780448" [ 2019.454714] env[63379]: _type = "Task" [ 2019.454714] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2019.463214] env[63379]: DEBUG oslo_vmware.api [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780448, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2019.816635] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63379) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2019.816848] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.781s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2019.817222] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d199a1a0-ff33-479b-b744-b43987cb249c tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 2.267s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2019.843603] env[63379]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2019.843603] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e4a830-33a9-9ee6-3a60-8adb6943a34d" [ 2019.843603] env[63379]: _type = "HttpNfcLease" [ 2019.843603] env[63379]: } is ready. 
{{(pid=63379) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2019.843906] env[63379]: DEBUG oslo_vmware.rw_handles [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2019.843906] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e4a830-33a9-9ee6-3a60-8adb6943a34d" [ 2019.843906] env[63379]: _type = "HttpNfcLease" [ 2019.843906] env[63379]: }. {{(pid=63379) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 2019.844658] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e4009c0-e279-4bbe-b95c-656c8cef8c12 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.852794] env[63379]: DEBUG oslo_concurrency.lockutils [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Releasing lock "refresh_cache-df8d513d-c201-4ffe-894e-cf8c3318cecc" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2019.853187] env[63379]: DEBUG oslo_vmware.rw_handles [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52572087-6937-4780-a71d-3009d6055916/disk-0.vmdk from lease info. {{(pid=63379) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2019.853362] env[63379]: DEBUG oslo_vmware.rw_handles [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Creating HTTP connection to write to file with size = 31660032 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52572087-6937-4780-a71d-3009d6055916/disk-0.vmdk. {{(pid=63379) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2019.855232] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1474e89a-8e5d-4491-8de4-1f1b006610c8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.914351] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d098b2d0-9c53-413c-8933-1cc1eb67abde {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.921568] env[63379]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-563e3185-b998-4287-821a-36f286018481 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.965229] env[63379]: DEBUG oslo_vmware.api [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780448, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065629} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2019.965528] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2019.966448] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c6a419e-838e-4604-8e86-064a8c077b06 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.989646] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Reconfiguring VM instance instance-00000074 to attach disk [datastore1] 20649b93-78ac-4805-aa24-5dbfef9d766b/20649b93-78ac-4805-aa24-5dbfef9d766b.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2019.989951] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-371402b6-4b55-4464-b884-a6c92d8773d4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.010084] env[63379]: DEBUG oslo_vmware.api [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2020.010084] env[63379]: value = "task-1780449" [ 2020.010084] env[63379]: _type = "Task" [ 2020.010084] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2020.021401] env[63379]: DEBUG oslo_vmware.api [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780449, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2020.466079] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-febbf43a-8ec4-4d4f-83dc-d06d70bef696 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.481925] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd35c2a3-5ab5-4f17-881a-83d1207f6132 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.530908] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbae5bbb-45dd-4550-bca7-9111ed003651 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.544830] env[63379]: DEBUG oslo_vmware.api [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780449, 'name': ReconfigVM_Task, 'duration_secs': 0.362199} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2020.546301] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Reconfigured VM instance instance-00000074 to attach disk [datastore1] 20649b93-78ac-4805-aa24-5dbfef9d766b/20649b93-78ac-4805-aa24-5dbfef9d766b.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2020.547985] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-953eec9e-09b4-4f15-851e-c0114c13551f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.553989] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-664ca2c6-ebca-4bf3-9323-d6592573a84f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.567619] env[63379]: DEBUG nova.compute.provider_tree [None req-d199a1a0-ff33-479b-b744-b43987cb249c tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2020.572696] env[63379]: DEBUG oslo_vmware.api [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2020.572696] env[63379]: value = "task-1780450" [ 2020.572696] env[63379]: _type = "Task" [ 2020.572696] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2020.589229] env[63379]: DEBUG oslo_vmware.api [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780450, 'name': Rename_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2020.815056] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2020.815361] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2020.815483] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2020.815654] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63379) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10593}} [ 2021.038554] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2021.038847] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0f274437-c671-4762-97e1-27328a74fa13 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.048488] env[63379]: DEBUG oslo_vmware.api [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 2021.048488] env[63379]: value = "task-1780452" [ 2021.048488] env[63379]: _type = "Task" [ 2021.048488] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2021.059017] env[63379]: DEBUG oslo_vmware.api [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780452, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2021.074549] env[63379]: DEBUG nova.scheduler.client.report [None req-d199a1a0-ff33-479b-b744-b43987cb249c tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2021.085260] env[63379]: DEBUG oslo_vmware.rw_handles [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Completed reading data from the image iterator. {{(pid=63379) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2021.085418] env[63379]: DEBUG oslo_vmware.rw_handles [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52572087-6937-4780-a71d-3009d6055916/disk-0.vmdk. 
{{(pid=63379) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2021.086223] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8adb8e9-cf3f-442f-873f-bd4aa3066b4d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.092666] env[63379]: DEBUG oslo_vmware.api [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780450, 'name': Rename_Task, 'duration_secs': 0.160425} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2021.093313] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2021.093847] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-81fa80e1-b2e3-46e5-9117-594c16ea624a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.097183] env[63379]: DEBUG oslo_vmware.rw_handles [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52572087-6937-4780-a71d-3009d6055916/disk-0.vmdk is in state: ready. {{(pid=63379) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2021.097354] env[63379]: DEBUG oslo_vmware.rw_handles [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52572087-6937-4780-a71d-3009d6055916/disk-0.vmdk. {{(pid=63379) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 2021.097912] env[63379]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-4fa901c4-ef16-4013-8b02-5e7a4b0b30ea {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.103576] env[63379]: DEBUG oslo_vmware.api [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2021.103576] env[63379]: value = "task-1780453" [ 2021.103576] env[63379]: _type = "Task" [ 2021.103576] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2021.112850] env[63379]: DEBUG oslo_vmware.api [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780453, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2021.365734] env[63379]: DEBUG oslo_vmware.rw_handles [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52572087-6937-4780-a71d-3009d6055916/disk-0.vmdk. {{(pid=63379) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 2021.366067] env[63379]: INFO nova.virt.vmwareapi.images [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Downloaded image file data 3959d3db-eff7-402f-81f5-8f67a00a1f20 [ 2021.366937] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e9e2d05-222d-4c8e-8fe3-8123539c4f98 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.383018] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-afa8140d-8039-4660-abad-2e8f9819f37c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.423744] env[63379]: INFO nova.virt.vmwareapi.images [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] The imported VM was unregistered [ 2021.426389] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Caching image {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2021.426646] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Creating directory with path [datastore1] devstack-image-cache_base/3959d3db-eff7-402f-81f5-8f67a00a1f20 {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2021.426932] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-deb6a0a9-2e38-4df0-ae06-0ab070ec5ae3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.448294] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Created directory with path [datastore1] devstack-image-cache_base/3959d3db-eff7-402f-81f5-8f67a00a1f20 {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2021.448687] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_f25df25d-1b17-4f88-92fe-6a7110495e8e/OSTACK_IMG_f25df25d-1b17-4f88-92fe-6a7110495e8e.vmdk to [datastore1] devstack-image-cache_base/3959d3db-eff7-402f-81f5-8f67a00a1f20/3959d3db-eff7-402f-81f5-8f67a00a1f20.vmdk. 
{{(pid=63379) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 2021.449293] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-7d17dc2c-108a-4a04-9519-bb665a656902 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.457536] env[63379]: DEBUG oslo_vmware.api [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Waiting for the task: (returnval){ [ 2021.457536] env[63379]: value = "task-1780455" [ 2021.457536] env[63379]: _type = "Task" [ 2021.457536] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2021.467531] env[63379]: DEBUG oslo_vmware.api [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780455, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2021.560343] env[63379]: DEBUG oslo_vmware.api [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780452, 'name': PowerOffVM_Task, 'duration_secs': 0.22039} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2021.560576] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2021.561376] env[63379]: DEBUG nova.virt.hardware [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2021.561547] env[63379]: DEBUG nova.virt.hardware [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2021.561713] env[63379]: DEBUG nova.virt.hardware [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2021.561896] env[63379]: DEBUG nova.virt.hardware [None 
req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2021.562061] env[63379]: DEBUG nova.virt.hardware [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2021.562234] env[63379]: DEBUG nova.virt.hardware [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2021.562447] env[63379]: DEBUG nova.virt.hardware [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2021.562611] env[63379]: DEBUG nova.virt.hardware [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2021.562803] env[63379]: DEBUG nova.virt.hardware [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2021.562987] env[63379]: DEBUG nova.virt.hardware [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2021.563181] env[63379]: DEBUG nova.virt.hardware [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2021.568109] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8fd3fd90-9919-4c0e-b26c-40b4b240e002 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.587824] env[63379]: DEBUG oslo_vmware.api [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 2021.587824] env[63379]: value = "task-1780456" [ 2021.587824] env[63379]: _type = "Task" [ 2021.587824] env[63379]: } to complete. 
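[annotation] The nova.virt.hardware entries above walk through CPU topology selection for the m1.nano flavor: no flavor or image limits (0:0:0), a ceiling of 65536 sockets/cores/threads, one vCPU, and therefore a single candidate topology of 1 socket x 1 core x 1 thread. The sketch below reproduces that enumeration in a simplified, self-contained form; it is an assumption-level stand-in for the constraint solving in nova/virt/hardware.py, not the real implementation.

from itertools import product

def possible_topologies(vcpus: int, max_sockets: int = 65536, max_cores: int = 65536, max_threads: int = 65536):
    """Enumerate (sockets, cores, threads) triples whose product equals the
    vCPU count and that stay within the given limits -- a simplified model of
    the decision logged above (illustrative only)."""
    divisors = [d for d in range(1, vcpus + 1) if vcpus % d == 0]
    topos = []
    for sockets, cores in product(divisors, divisors):
        if vcpus % (sockets * cores):
            continue
        threads = vcpus // (sockets * cores)
        if sockets <= max_sockets and cores <= max_cores and threads <= max_threads:
            topos.append((sockets, cores, threads))
    return topos

if __name__ == "__main__":
    # m1.nano in the log has a single vCPU and no flavor/image constraints,
    # so the only candidate is 1 socket x 1 core x 1 thread.
    print(possible_topologies(1))   # [(1, 1, 1)]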
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2021.597300] env[63379]: DEBUG oslo_vmware.api [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780456, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2021.615143] env[63379]: DEBUG oslo_vmware.api [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780453, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2021.965541] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2021.965732] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Starting heal instance info cache {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9974}} [ 2021.965905] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Rebuilding the list of instances to heal {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9978}} [ 2021.970746] env[63379]: DEBUG oslo_vmware.api [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780455, 'name': MoveVirtualDisk_Task} progress is 15%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2022.094253] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d199a1a0-ff33-479b-b744-b43987cb249c tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.277s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2022.104054] env[63379]: DEBUG oslo_vmware.api [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780456, 'name': ReconfigVM_Task, 'duration_secs': 0.163794} completed successfully. 
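[annotation] The repeated "Waiting for the task ... to complete" and "progress is N%" entries are the driver polling a vCenter task until it reaches a terminal state. Below is a minimal poll-until-done sketch of that pattern; fetch_task_info is a hypothetical callable standing in for the real property-collector query, and the loop is not oslo.vmware's wait_for_task implementation.

import time
from typing import Callable, Dict

def wait_for_task(fetch_task_info: Callable[[], Dict], poll_interval: float = 0.5, timeout: float = 300.0) -> Dict:
    """Poll a task-info callable until it reports success or error, logging
    progress along the way -- the same shape as the 'progress is N%' entries
    above (illustrative only)."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_task_info()
        state = info.get("state")
        if state == "success":
            return info
        if state == "error":
            raise RuntimeError(f"task {info.get('id')} failed: {info.get('error')}")
        print(f"Task {info.get('id')} {info.get('name')} progress is {info.get('progress', 0)}%")
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete in time")

if __name__ == "__main__":
    # Fake task that finishes after three progress polls, for demonstration.
    states = iter([{"id": "task-1", "name": "MoveVirtualDisk_Task", "state": "running", "progress": p}
                   for p in (0, 43, 80)] + [{"id": "task-1", "state": "success"}])
    print(wait_for_task(lambda: next(states), poll_interval=0.01))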
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2022.104741] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-787a1778-f065-441e-b068-b4ca14555d34 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.133784] env[63379]: DEBUG nova.virt.hardware [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2022.134077] env[63379]: DEBUG nova.virt.hardware [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2022.134264] env[63379]: DEBUG nova.virt.hardware [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2022.134461] env[63379]: DEBUG nova.virt.hardware [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2022.134614] env[63379]: DEBUG nova.virt.hardware [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2022.134765] env[63379]: DEBUG nova.virt.hardware [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2022.134982] env[63379]: DEBUG nova.virt.hardware [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2022.135168] env[63379]: DEBUG nova.virt.hardware [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Build topologies for 1 vcpu(s) 
1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2022.135345] env[63379]: DEBUG nova.virt.hardware [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2022.135523] env[63379]: DEBUG nova.virt.hardware [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2022.135706] env[63379]: DEBUG nova.virt.hardware [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2022.140441] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c876bc77-60a1-4ea7-854b-f3ad1217bbc0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.143238] env[63379]: DEBUG oslo_vmware.api [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780453, 'name': PowerOnVM_Task, 'duration_secs': 0.591404} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2022.143506] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2022.143718] env[63379]: INFO nova.compute.manager [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Took 12.61 seconds to spawn the instance on the hypervisor. [ 2022.143900] env[63379]: DEBUG nova.compute.manager [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2022.145114] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e60fee7f-e658-4146-a384-44fea3db0334 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.149903] env[63379]: DEBUG oslo_vmware.api [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 2022.149903] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c4d36e-befd-68b7-c2ed-20dc2df10abb" [ 2022.149903] env[63379]: _type = "Task" [ 2022.149903] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2022.164176] env[63379]: DEBUG oslo_vmware.api [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c4d36e-befd-68b7-c2ed-20dc2df10abb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2022.468584] env[63379]: DEBUG oslo_vmware.api [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780455, 'name': MoveVirtualDisk_Task} progress is 29%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2022.474105] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Skipping network cache update for instance because it is Building. {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9987}} [ 2022.501787] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "refresh_cache-510db409-0b4c-494a-8084-39ef3cd6c918" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2022.501948] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquired lock "refresh_cache-510db409-0b4c-494a-8084-39ef3cd6c918" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2022.502116] env[63379]: DEBUG nova.network.neutron [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Forcefully refreshing network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2022.502278] env[63379]: DEBUG nova.objects.instance [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lazy-loading 'info_cache' on Instance uuid 510db409-0b4c-494a-8084-39ef3cd6c918 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2022.660254] env[63379]: INFO nova.compute.manager [None req-96e401f9-ac7c-4b7e-8857-c91297f279e4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Get console output [ 2022.660996] env[63379]: WARNING nova.virt.vmwareapi.driver [None req-96e401f9-ac7c-4b7e-8857-c91297f279e4 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] The console log is missing. 
Check your VSPC configuration [ 2022.669061] env[63379]: INFO nova.scheduler.client.report [None req-d199a1a0-ff33-479b-b744-b43987cb249c tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Deleted allocation for migration 4814e176-a955-41b5-bf25-9bfbcbc945cb [ 2022.674624] env[63379]: DEBUG oslo_vmware.api [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52c4d36e-befd-68b7-c2ed-20dc2df10abb, 'name': SearchDatastore_Task, 'duration_secs': 0.082102} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2022.684257] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Reconfiguring VM instance instance-0000006c to detach disk 2000 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2022.686144] env[63379]: INFO nova.compute.manager [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Took 19.91 seconds to build instance. [ 2022.689058] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a1f8fdd6-d213-4d30-b070-9a9d0b2e8fc2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.712303] env[63379]: DEBUG oslo_vmware.api [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 2022.712303] env[63379]: value = "task-1780457" [ 2022.712303] env[63379]: _type = "Task" [ 2022.712303] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2022.724448] env[63379]: DEBUG oslo_vmware.api [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780457, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2022.931365] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-31ff0f0e-4ca0-48a0-9e11-74e82e2cf322 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Volume attach. 
Driver type: vmdk {{(pid=63379) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2022.931674] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-31ff0f0e-4ca0-48a0-9e11-74e82e2cf322 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369525', 'volume_id': 'bda791f5-f0d0-4e7a-9d83-041ad83b18a7', 'name': 'volume-bda791f5-f0d0-4e7a-9d83-041ad83b18a7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a39c5511-3efc-41e9-8902-692f237557e1', 'attached_at': '', 'detached_at': '', 'volume_id': 'bda791f5-f0d0-4e7a-9d83-041ad83b18a7', 'serial': 'bda791f5-f0d0-4e7a-9d83-041ad83b18a7'} {{(pid=63379) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2022.932649] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9845b7f5-f6f5-4893-9a2e-fbf50d9348ca {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.950562] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9651b5c-1586-427a-adbb-3d378678dd0a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.982081] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-31ff0f0e-4ca0-48a0-9e11-74e82e2cf322 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Reconfiguring VM instance instance-00000070 to attach disk [datastore1] volume-bda791f5-f0d0-4e7a-9d83-041ad83b18a7/volume-bda791f5-f0d0-4e7a-9d83-041ad83b18a7.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2022.986208] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6162c89b-a865-424a-910d-3199a42c8861 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.008675] env[63379]: DEBUG oslo_vmware.api [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780455, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2023.010240] env[63379]: DEBUG oslo_vmware.api [None req-31ff0f0e-4ca0-48a0-9e11-74e82e2cf322 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 2023.010240] env[63379]: value = "task-1780458" [ 2023.010240] env[63379]: _type = "Task" [ 2023.010240] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2023.021025] env[63379]: DEBUG oslo_vmware.api [None req-31ff0f0e-4ca0-48a0-9e11-74e82e2cf322 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780458, 'name': ReconfigVM_Task} progress is 5%. 
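[annotation] The _attach_volume_vmdk entry above logs the connection_info handed to the driver: driver_volume_type 'vmdk' plus a data block with the backing object ('vm-369525'), the volume id, the vmdk name, and the access mode. The sketch below just pulls those fields out of such a dict; the helper and the interpretation of the 'volume' field (the managed object backing the volume, as inferred from the log) are assumptions for illustration, not Nova's volumeops code.

from dataclasses import dataclass

@dataclass
class VmdkAttachInfo:
    backing_ref: str     # 'volume' field from the log, e.g. 'vm-369525' (meaning inferred)
    volume_id: str
    vmdk_name: str
    access_mode: str

def parse_vmdk_connection_info(connection_info: dict) -> VmdkAttachInfo:
    """Extract the fields an attach needs from a connection_info dict shaped
    like the one logged above (hypothetical helper)."""
    if connection_info.get("driver_volume_type") != "vmdk":
        raise ValueError("not a vmdk connection")
    data = connection_info["data"]
    return VmdkAttachInfo(
        backing_ref=data["volume"],
        volume_id=data["volume_id"],
        vmdk_name=data["name"],
        access_mode=data.get("access_mode", "rw"),
    )

if __name__ == "__main__":
    info = parse_vmdk_connection_info({
        "driver_volume_type": "vmdk",
        "data": {"volume": "vm-369525",
                 "volume_id": "bda791f5-f0d0-4e7a-9d83-041ad83b18a7",
                 "name": "volume-bda791f5-f0d0-4e7a-9d83-041ad83b18a7",
                 "access_mode": "rw"},
    })
    print(info)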
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2023.186748] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d199a1a0-ff33-479b-b744-b43987cb249c tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "8b9f070e-11d3-4e2d-a0ce-54bb939a36ff" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 10.551s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2023.203683] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6f75a633-bf38-4bb4-86f0-cd48948cb148 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "20649b93-78ac-4805-aa24-5dbfef9d766b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.430s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2023.224447] env[63379]: DEBUG oslo_vmware.api [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780457, 'name': ReconfigVM_Task, 'duration_secs': 0.268137} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2023.224785] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Reconfigured VM instance instance-0000006c to detach disk 2000 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2023.225617] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6aeedd9-0276-4d9c-b53d-9bfc8fbd7228 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.254491] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] df8d513d-c201-4ffe-894e-cf8c3318cecc/df8d513d-c201-4ffe-894e-cf8c3318cecc.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2023.254843] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0b75fac1-b379-462d-a7a3-2f0ff9a43e92 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.275149] env[63379]: DEBUG oslo_vmware.api [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 2023.275149] env[63379]: value = "task-1780459" [ 2023.275149] env[63379]: _type = "Task" [ 2023.275149] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2023.285084] env[63379]: DEBUG oslo_vmware.api [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780459, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2023.485114] env[63379]: DEBUG oslo_vmware.api [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780455, 'name': MoveVirtualDisk_Task} progress is 54%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2023.523507] env[63379]: DEBUG oslo_vmware.api [None req-31ff0f0e-4ca0-48a0-9e11-74e82e2cf322 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780458, 'name': ReconfigVM_Task, 'duration_secs': 0.453818} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2023.523826] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-31ff0f0e-4ca0-48a0-9e11-74e82e2cf322 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Reconfigured VM instance instance-00000070 to attach disk [datastore1] volume-bda791f5-f0d0-4e7a-9d83-041ad83b18a7/volume-bda791f5-f0d0-4e7a-9d83-041ad83b18a7.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2023.528681] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f64d68d2-770b-4095-9163-1c370f8144b8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.545366] env[63379]: DEBUG oslo_vmware.api [None req-31ff0f0e-4ca0-48a0-9e11-74e82e2cf322 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 2023.545366] env[63379]: value = "task-1780460" [ 2023.545366] env[63379]: _type = "Task" [ 2023.545366] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2023.554565] env[63379]: DEBUG oslo_vmware.api [None req-31ff0f0e-4ca0-48a0-9e11-74e82e2cf322 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780460, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2023.787327] env[63379]: DEBUG oslo_vmware.api [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780459, 'name': ReconfigVM_Task, 'duration_secs': 0.408457} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2023.787630] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Reconfigured VM instance instance-0000006c to attach disk [datastore1] df8d513d-c201-4ffe-894e-cf8c3318cecc/df8d513d-c201-4ffe-894e-cf8c3318cecc.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2023.788576] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cbb6579-b8aa-404f-aa39-7927305ad325 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.810707] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e93cb9b-9e96-409d-ae38-1873fc84d5ee {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.833114] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4c3e60d-b13f-4d2a-8251-ae2011c5865e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.857737] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40797594-526e-400e-ac22-44b034676087 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.866060] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2023.866353] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cf5b6442-6694-43e6-b084-dc90be9e576b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.874496] env[63379]: DEBUG oslo_vmware.api [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 2023.874496] env[63379]: value = "task-1780461" [ 2023.874496] env[63379]: _type = "Task" [ 2023.874496] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2023.883572] env[63379]: DEBUG oslo_vmware.api [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780461, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2023.985650] env[63379]: DEBUG oslo_vmware.api [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780455, 'name': MoveVirtualDisk_Task} progress is 66%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2023.991300] env[63379]: DEBUG oslo_concurrency.lockutils [None req-81224508-d004-4611-ae26-03fc05e6a0bd tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "20649b93-78ac-4805-aa24-5dbfef9d766b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2023.991562] env[63379]: DEBUG oslo_concurrency.lockutils [None req-81224508-d004-4611-ae26-03fc05e6a0bd tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "20649b93-78ac-4805-aa24-5dbfef9d766b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2023.991785] env[63379]: DEBUG oslo_concurrency.lockutils [None req-81224508-d004-4611-ae26-03fc05e6a0bd tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "20649b93-78ac-4805-aa24-5dbfef9d766b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2023.991972] env[63379]: DEBUG oslo_concurrency.lockutils [None req-81224508-d004-4611-ae26-03fc05e6a0bd tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "20649b93-78ac-4805-aa24-5dbfef9d766b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2023.992170] env[63379]: DEBUG oslo_concurrency.lockutils [None req-81224508-d004-4611-ae26-03fc05e6a0bd tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "20649b93-78ac-4805-aa24-5dbfef9d766b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2023.994430] env[63379]: INFO nova.compute.manager [None req-81224508-d004-4611-ae26-03fc05e6a0bd tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Terminating instance [ 2023.996363] env[63379]: DEBUG nova.compute.manager [None req-81224508-d004-4611-ae26-03fc05e6a0bd tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Start destroying the instance on the hypervisor. 
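[annotation] The terminate sequence above first takes the per-instance lock ("<uuid>" for do_terminate_instance), then briefly takes "<uuid>-events" to clear queued external events, and only then starts destroying the instance. The toy sketch below shows the same named-lock pattern with wait/held reporting; it is a threading-based stand-in written for illustration and is not oslo.concurrency's lockutils.

import threading, time
from collections import defaultdict
from contextlib import contextmanager

_locks = defaultdict(threading.Lock)   # one lock per name, e.g. "<uuid>" or "<uuid>-events"

@contextmanager
def named_lock(name: str, owner: str):
    """Serialize work on one instance by lock name and report wait/held times,
    in the spirit of the lockutils messages above (toy stand-in, not oslo)."""
    lock = _locks[name]
    start = time.monotonic()
    lock.acquire()
    print(f'Lock "{name}" acquired by "{owner}" :: waited {time.monotonic() - start:.3f}s')
    held = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" released by "{owner}" :: held {time.monotonic() - held:.3f}s')

if __name__ == "__main__":
    instance = "20649b93-78ac-4805-aa24-5dbfef9d766b"
    with named_lock(instance, "do_terminate_instance"):
        with named_lock(instance + "-events", "_clear_events"):
            pass                       # drop queued external events first
        time.sleep(0.01)               # then power off / unregister / delete files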
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2023.996570] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-81224508-d004-4611-ae26-03fc05e6a0bd tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2023.997442] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-654c1c88-f364-4576-ade8-0b4a935ad9b5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.006925] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-81224508-d004-4611-ae26-03fc05e6a0bd tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2024.007183] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3a3ebd7f-8dca-421b-8338-be5d10a2368a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.015640] env[63379]: DEBUG oslo_vmware.api [None req-81224508-d004-4611-ae26-03fc05e6a0bd tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2024.015640] env[63379]: value = "task-1780462" [ 2024.015640] env[63379]: _type = "Task" [ 2024.015640] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2024.027746] env[63379]: DEBUG oslo_vmware.api [None req-81224508-d004-4611-ae26-03fc05e6a0bd tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780462, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2024.056564] env[63379]: DEBUG oslo_vmware.api [None req-31ff0f0e-4ca0-48a0-9e11-74e82e2cf322 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780460, 'name': ReconfigVM_Task, 'duration_secs': 0.172115} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2024.057043] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-31ff0f0e-4ca0-48a0-9e11-74e82e2cf322 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369525', 'volume_id': 'bda791f5-f0d0-4e7a-9d83-041ad83b18a7', 'name': 'volume-bda791f5-f0d0-4e7a-9d83-041ad83b18a7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a39c5511-3efc-41e9-8902-692f237557e1', 'attached_at': '', 'detached_at': '', 'volume_id': 'bda791f5-f0d0-4e7a-9d83-041ad83b18a7', 'serial': 'bda791f5-f0d0-4e7a-9d83-041ad83b18a7'} {{(pid=63379) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2024.292120] env[63379]: DEBUG nova.network.neutron [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Updating instance_info_cache with network_info: [{"id": "6cdabd2b-f665-46a9-a86e-2527cfe452bf", "address": "fa:16:3e:bc:a5:55", "network": {"id": "867cf8d8-4bba-4306-ad6d-632c9dc6863d", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-777715300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a3363a90de2d4d5988ddd03974c10d0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "459b8c74-0aa6-42b6-996a-42b1c5d7e5c6", "external-id": "nsx-vlan-transportzone-467", "segmentation_id": 467, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6cdabd2b-f6", "ovs_interfaceid": "6cdabd2b-f665-46a9-a86e-2527cfe452bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2024.387569] env[63379]: DEBUG oslo_vmware.api [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780461, 'name': PowerOnVM_Task, 'duration_secs': 0.467644} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2024.387976] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2024.490664] env[63379]: DEBUG oslo_vmware.api [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780455, 'name': MoveVirtualDisk_Task} progress is 80%. 
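[annotation] The "Updating instance_info_cache with network_info" entry above carries the full cached Neutron view of the instance: a list of VIFs, each with a port id, MAC address, and nested network/subnet/IP data. The sketch below flattens that nested structure into (port id, MAC, fixed IPs) tuples; it walks plain dicts of the same shape for illustration and does not use Nova's network model objects.

def summarize_network_info(network_info: list) -> list:
    """Flatten a network_info cache entry like the one logged above into
    (port id, MAC, [fixed IPs]) tuples (illustrative only)."""
    summary = []
    for vif in network_info:
        ips = [ip["address"]
               for subnet in vif.get("network", {}).get("subnets", [])
               for ip in subnet.get("ips", [])]
        summary.append((vif.get("id"), vif.get("address"), ips))
    return summary

if __name__ == "__main__":
    cache = [{"id": "6cdabd2b-f665-46a9-a86e-2527cfe452bf",
              "address": "fa:16:3e:bc:a5:55",
              "network": {"subnets": [{"cidr": "192.168.128.0/28",
                                       "ips": [{"address": "192.168.128.10"}]}]}}]
    print(summarize_network_info(cache))
    # [('6cdabd2b-f665-46a9-a86e-2527cfe452bf', 'fa:16:3e:bc:a5:55', ['192.168.128.10'])]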
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2024.528724] env[63379]: DEBUG oslo_vmware.api [None req-81224508-d004-4611-ae26-03fc05e6a0bd tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780462, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2024.795231] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Releasing lock "refresh_cache-510db409-0b4c-494a-8084-39ef3cd6c918" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2024.795459] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Updated the network info_cache for instance {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10045}} [ 2024.795703] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2024.964078] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2024.985651] env[63379]: DEBUG oslo_vmware.api [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780455, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2025.026375] env[63379]: DEBUG oslo_vmware.api [None req-81224508-d004-4611-ae26-03fc05e6a0bd tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780462, 'name': PowerOffVM_Task} progress is 100%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2025.104350] env[63379]: DEBUG nova.objects.instance [None req-31ff0f0e-4ca0-48a0-9e11-74e82e2cf322 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lazy-loading 'flavor' on Instance uuid a39c5511-3efc-41e9-8902-692f237557e1 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2025.430354] env[63379]: INFO nova.compute.manager [None req-01d1a630-7b28-44d5-b96a-8cc9d318c272 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Updating instance to original state: 'active' [ 2025.490677] env[63379]: DEBUG oslo_vmware.api [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780455, 'name': MoveVirtualDisk_Task, 'duration_secs': 3.63694} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2025.491262] env[63379]: INFO nova.virt.vmwareapi.ds_util [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_f25df25d-1b17-4f88-92fe-6a7110495e8e/OSTACK_IMG_f25df25d-1b17-4f88-92fe-6a7110495e8e.vmdk to [datastore1] devstack-image-cache_base/3959d3db-eff7-402f-81f5-8f67a00a1f20/3959d3db-eff7-402f-81f5-8f67a00a1f20.vmdk. [ 2025.491506] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Cleaning up location [datastore1] OSTACK_IMG_f25df25d-1b17-4f88-92fe-6a7110495e8e {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 2025.491840] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_f25df25d-1b17-4f88-92fe-6a7110495e8e {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2025.492253] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b4541cef-54ea-4d7d-9346-446f7bb3230b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.501118] env[63379]: DEBUG oslo_vmware.api [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Waiting for the task: (returnval){ [ 2025.501118] env[63379]: value = "task-1780463" [ 2025.501118] env[63379]: _type = "Task" [ 2025.501118] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2025.509169] env[63379]: DEBUG oslo_vmware.api [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780463, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2025.525835] env[63379]: DEBUG oslo_vmware.api [None req-81224508-d004-4611-ae26-03fc05e6a0bd tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780462, 'name': PowerOffVM_Task, 'duration_secs': 1.080267} completed successfully. 
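[annotation] At this point the disk has been moved into the image cache, the temporary OSTACK_IMG_* location is deleted, and a few entries later the cached VMDK is copied into the instance's own folder. The sketch below is a toy model of that fetch -> cache -> copy decision, where the datastore is just a set of existing paths; it only illustrates the ordering visible in the log and is not Nova's _fetch_image_if_missing.

def ensure_root_disk(datastore: set, image_id: str, instance_id: str) -> list:
    """Return the datastore operations needed to give an instance its root
    disk, preferring the shared image cache (toy model of the sequence in the
    log; `datastore` is just a set of existing paths here)."""
    cached = f"devstack-image-cache_base/{image_id}/{image_id}.vmdk"
    target = f"{instance_id}/{instance_id}.vmdk"
    ops = []
    if cached not in datastore:
        tmp = f"OSTACK_IMG_{image_id}/{image_id}.vmdk"
        ops += [("import", tmp), ("move", tmp, cached), ("delete", f"OSTACK_IMG_{image_id}")]
        datastore.add(cached)
    ops.append(("copy", cached, target))   # the CopyVirtualDisk_Task step in the log
    return ops

if __name__ == "__main__":
    ds = set()
    for op in ensure_root_disk(ds, "3959d3db", "10fc842d"):
        print(op)
    # A second instance using the same image only needs the copy step.
    print(ensure_root_disk(ds, "3959d3db", "another-instance"))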
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2025.526123] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-81224508-d004-4611-ae26-03fc05e6a0bd tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2025.526305] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-81224508-d004-4611-ae26-03fc05e6a0bd tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2025.526564] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-22fe7ac8-0f4d-4cd0-b763-0c845d04a6e0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.609395] env[63379]: DEBUG oslo_concurrency.lockutils [None req-31ff0f0e-4ca0-48a0-9e11-74e82e2cf322 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "a39c5511-3efc-41e9-8902-692f237557e1" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.285s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2025.754998] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-81224508-d004-4611-ae26-03fc05e6a0bd tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2025.755336] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-81224508-d004-4611-ae26-03fc05e6a0bd tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2025.755594] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-81224508-d004-4611-ae26-03fc05e6a0bd tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Deleting the datastore file [datastore1] 20649b93-78ac-4805-aa24-5dbfef9d766b {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2025.755976] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f617cc85-d415-4a6e-b747-e8f209a16c2e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.759913] env[63379]: DEBUG oslo_concurrency.lockutils [None req-896b482f-64a4-41aa-b4c6-e531e9b5fd18 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquiring lock "a39c5511-3efc-41e9-8902-692f237557e1" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2025.760156] env[63379]: DEBUG oslo_concurrency.lockutils [None req-896b482f-64a4-41aa-b4c6-e531e9b5fd18 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock 
"a39c5511-3efc-41e9-8902-692f237557e1" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2025.763217] env[63379]: DEBUG oslo_vmware.api [None req-81224508-d004-4611-ae26-03fc05e6a0bd tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2025.763217] env[63379]: value = "task-1780465" [ 2025.763217] env[63379]: _type = "Task" [ 2025.763217] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2025.771414] env[63379]: DEBUG oslo_vmware.api [None req-81224508-d004-4611-ae26-03fc05e6a0bd tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780465, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2025.960221] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2026.011527] env[63379]: DEBUG oslo_vmware.api [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780463, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.036008} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2026.011796] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2026.011968] env[63379]: DEBUG oslo_concurrency.lockutils [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3959d3db-eff7-402f-81f5-8f67a00a1f20/3959d3db-eff7-402f-81f5-8f67a00a1f20.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2026.012241] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3959d3db-eff7-402f-81f5-8f67a00a1f20/3959d3db-eff7-402f-81f5-8f67a00a1f20.vmdk to [datastore1] 10fc842d-b821-4103-b6a5-f5b2fc46ea74/10fc842d-b821-4103-b6a5-f5b2fc46ea74.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2026.012492] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4ef64e46-8da2-43ae-8bb7-9817b7d006a6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.019521] env[63379]: DEBUG oslo_vmware.api [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Waiting for the 
task: (returnval){ [ 2026.019521] env[63379]: value = "task-1780466" [ 2026.019521] env[63379]: _type = "Task" [ 2026.019521] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2026.027273] env[63379]: DEBUG oslo_vmware.api [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780466, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2026.263381] env[63379]: INFO nova.compute.manager [None req-896b482f-64a4-41aa-b4c6-e531e9b5fd18 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Detaching volume bda791f5-f0d0-4e7a-9d83-041ad83b18a7 [ 2026.280277] env[63379]: DEBUG oslo_vmware.api [None req-81224508-d004-4611-ae26-03fc05e6a0bd tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780465, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141703} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2026.280632] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-81224508-d004-4611-ae26-03fc05e6a0bd tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2026.280882] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-81224508-d004-4611-ae26-03fc05e6a0bd tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2026.281163] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-81224508-d004-4611-ae26-03fc05e6a0bd tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2026.281425] env[63379]: INFO nova.compute.manager [None req-81224508-d004-4611-ae26-03fc05e6a0bd tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Took 2.28 seconds to destroy the instance on the hypervisor. [ 2026.281763] env[63379]: DEBUG oslo.service.loopingcall [None req-81224508-d004-4611-ae26-03fc05e6a0bd tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
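[annotation] The "Waiting for function ... _deallocate_network_with_retries to return" entry above shows network cleanup being wrapped in a retrying looping call so a transient Neutron failure does not leak ports. Below is a simplified bounded-retry wrapper conveying the same idea; it is a plain-Python stand-in written for illustration, not oslo.service's loopingcall.

import time

def call_with_retries(func, attempts: int = 3, delay: float = 0.1):
    """Retry a flaky cleanup call a bounded number of times before giving up
    (simplified stand-in for the retrying looping call seen above)."""
    for attempt in range(1, attempts + 1):
        try:
            return func()
        except Exception as exc:          # broad catch is fine for a demo
            if attempt == attempts:
                raise
            print(f"attempt {attempt} failed ({exc}); retrying in {delay}s")
            time.sleep(delay)

if __name__ == "__main__":
    calls = {"n": 0}
    def deallocate():
        calls["n"] += 1
        if calls["n"] < 2:
            raise RuntimeError("neutron temporarily unavailable")
        return "deallocated"
    print(call_with_retries(deallocate, delay=0.01))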
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2026.282035] env[63379]: DEBUG nova.compute.manager [-] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2026.282171] env[63379]: DEBUG nova.network.neutron [-] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2026.300825] env[63379]: INFO nova.virt.block_device [None req-896b482f-64a4-41aa-b4c6-e531e9b5fd18 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Attempting to driver detach volume bda791f5-f0d0-4e7a-9d83-041ad83b18a7 from mountpoint /dev/sdb [ 2026.301121] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-896b482f-64a4-41aa-b4c6-e531e9b5fd18 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Volume detach. Driver type: vmdk {{(pid=63379) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2026.301349] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-896b482f-64a4-41aa-b4c6-e531e9b5fd18 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369525', 'volume_id': 'bda791f5-f0d0-4e7a-9d83-041ad83b18a7', 'name': 'volume-bda791f5-f0d0-4e7a-9d83-041ad83b18a7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a39c5511-3efc-41e9-8902-692f237557e1', 'attached_at': '', 'detached_at': '', 'volume_id': 'bda791f5-f0d0-4e7a-9d83-041ad83b18a7', 'serial': 'bda791f5-f0d0-4e7a-9d83-041ad83b18a7'} {{(pid=63379) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2026.302280] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1a8fd2a-15fc-449f-a6da-0b1986b1694b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.330294] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caba3a5a-8cbd-41f8-afc2-88856d278366 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.339145] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66036f7d-f9cb-4e64-914c-5f79112cbd54 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.362393] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dabd0c49-4fee-42a0-94c0-8a5606b1dea1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.380965] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-896b482f-64a4-41aa-b4c6-e531e9b5fd18 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] The volume has not been displaced from its original location: [datastore1] 
volume-bda791f5-f0d0-4e7a-9d83-041ad83b18a7/volume-bda791f5-f0d0-4e7a-9d83-041ad83b18a7.vmdk. No consolidation needed. {{(pid=63379) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2026.387101] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-896b482f-64a4-41aa-b4c6-e531e9b5fd18 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Reconfiguring VM instance instance-00000070 to detach disk 2001 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2026.387500] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ccf09194-e24a-46b8-9f5d-488051d4a93e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.411412] env[63379]: DEBUG oslo_vmware.api [None req-896b482f-64a4-41aa-b4c6-e531e9b5fd18 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 2026.411412] env[63379]: value = "task-1780467" [ 2026.411412] env[63379]: _type = "Task" [ 2026.411412] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2026.421841] env[63379]: DEBUG oslo_vmware.api [None req-896b482f-64a4-41aa-b4c6-e531e9b5fd18 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780467, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2026.533300] env[63379]: DEBUG oslo_vmware.api [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780466, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2026.626926] env[63379]: DEBUG nova.compute.manager [req-9b08fefa-e376-4d1e-94f4-5c1f413fc8cb req-9633baec-84e5-4d4a-9af0-6c5398efc0f8 service nova] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Received event network-vif-deleted-09723c5d-7ef3-49ea-b024-03afb282b0d5 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2026.627219] env[63379]: INFO nova.compute.manager [req-9b08fefa-e376-4d1e-94f4-5c1f413fc8cb req-9633baec-84e5-4d4a-9af0-6c5398efc0f8 service nova] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Neutron deleted interface 09723c5d-7ef3-49ea-b024-03afb282b0d5; detaching it from the instance and deleting it from the info cache [ 2026.627447] env[63379]: DEBUG nova.network.neutron [req-9b08fefa-e376-4d1e-94f4-5c1f413fc8cb req-9633baec-84e5-4d4a-9af0-6c5398efc0f8 service nova] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2026.921301] env[63379]: DEBUG oslo_vmware.api [None req-896b482f-64a4-41aa-b4c6-e531e9b5fd18 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780467, 'name': ReconfigVM_Task, 'duration_secs': 0.364506} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2026.921639] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-896b482f-64a4-41aa-b4c6-e531e9b5fd18 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Reconfigured VM instance instance-00000070 to detach disk 2001 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2026.926459] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b5b479bd-4256-4d8e-9a38-94b15fad5931 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.944576] env[63379]: DEBUG oslo_vmware.api [None req-896b482f-64a4-41aa-b4c6-e531e9b5fd18 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 2026.944576] env[63379]: value = "task-1780468" [ 2026.944576] env[63379]: _type = "Task" [ 2026.944576] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2026.955384] env[63379]: DEBUG oslo_vmware.api [None req-896b482f-64a4-41aa-b4c6-e531e9b5fd18 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780468, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2026.964208] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2027.034725] env[63379]: DEBUG oslo_vmware.api [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780466, 'name': CopyVirtualDisk_Task} progress is 40%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2027.098709] env[63379]: DEBUG nova.network.neutron [-] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2027.131881] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-51483e35-487e-4a91-b23e-7b84beb05169 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.145232] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaf5ba7b-fd44-4749-beb5-ce8a7205d607 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.180183] env[63379]: DEBUG nova.compute.manager [req-9b08fefa-e376-4d1e-94f4-5c1f413fc8cb req-9633baec-84e5-4d4a-9af0-6c5398efc0f8 service nova] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Detach interface failed, port_id=09723c5d-7ef3-49ea-b024-03afb282b0d5, reason: Instance 20649b93-78ac-4805-aa24-5dbfef9d766b could not be found. 
{{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 2027.366739] env[63379]: DEBUG oslo_concurrency.lockutils [None req-77d6082d-ce45-460f-9907-722c7f437199 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "df8d513d-c201-4ffe-894e-cf8c3318cecc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2027.366984] env[63379]: DEBUG oslo_concurrency.lockutils [None req-77d6082d-ce45-460f-9907-722c7f437199 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "df8d513d-c201-4ffe-894e-cf8c3318cecc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2027.367238] env[63379]: DEBUG oslo_concurrency.lockutils [None req-77d6082d-ce45-460f-9907-722c7f437199 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "df8d513d-c201-4ffe-894e-cf8c3318cecc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2027.367512] env[63379]: DEBUG oslo_concurrency.lockutils [None req-77d6082d-ce45-460f-9907-722c7f437199 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "df8d513d-c201-4ffe-894e-cf8c3318cecc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2027.367770] env[63379]: DEBUG oslo_concurrency.lockutils [None req-77d6082d-ce45-460f-9907-722c7f437199 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "df8d513d-c201-4ffe-894e-cf8c3318cecc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2027.370222] env[63379]: INFO nova.compute.manager [None req-77d6082d-ce45-460f-9907-722c7f437199 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Terminating instance [ 2027.372309] env[63379]: DEBUG nova.compute.manager [None req-77d6082d-ce45-460f-9907-722c7f437199 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2027.372534] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-77d6082d-ce45-460f-9907-722c7f437199 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2027.372782] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3523275d-eb8c-4bf2-a5cd-d07857f3ded2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.381699] env[63379]: DEBUG oslo_vmware.api [None req-77d6082d-ce45-460f-9907-722c7f437199 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 2027.381699] env[63379]: value = "task-1780469" [ 2027.381699] env[63379]: _type = "Task" [ 2027.381699] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2027.391872] env[63379]: DEBUG oslo_vmware.api [None req-77d6082d-ce45-460f-9907-722c7f437199 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780469, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2027.455646] env[63379]: DEBUG oslo_vmware.api [None req-896b482f-64a4-41aa-b4c6-e531e9b5fd18 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780468, 'name': ReconfigVM_Task, 'duration_secs': 0.176572} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2027.455948] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-896b482f-64a4-41aa-b4c6-e531e9b5fd18 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369525', 'volume_id': 'bda791f5-f0d0-4e7a-9d83-041ad83b18a7', 'name': 'volume-bda791f5-f0d0-4e7a-9d83-041ad83b18a7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a39c5511-3efc-41e9-8902-692f237557e1', 'attached_at': '', 'detached_at': '', 'volume_id': 'bda791f5-f0d0-4e7a-9d83-041ad83b18a7', 'serial': 'bda791f5-f0d0-4e7a-9d83-041ad83b18a7'} {{(pid=63379) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2027.534186] env[63379]: DEBUG oslo_vmware.api [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780466, 'name': CopyVirtualDisk_Task} progress is 60%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2027.603041] env[63379]: INFO nova.compute.manager [-] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Took 1.32 seconds to deallocate network for instance. 
[ 2027.894100] env[63379]: DEBUG oslo_vmware.api [None req-77d6082d-ce45-460f-9907-722c7f437199 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780469, 'name': PowerOffVM_Task, 'duration_secs': 0.261378} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2027.894317] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-77d6082d-ce45-460f-9907-722c7f437199 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2027.894533] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-77d6082d-ce45-460f-9907-722c7f437199 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Volume detach. Driver type: vmdk {{(pid=63379) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2027.894738] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-77d6082d-ce45-460f-9907-722c7f437199 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369514', 'volume_id': '6a4d7edc-98d8-414f-9347-350d38aa7ea4', 'name': 'volume-6a4d7edc-98d8-414f-9347-350d38aa7ea4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': 'df8d513d-c201-4ffe-894e-cf8c3318cecc', 'attached_at': '2024-12-11T23:39:20.000000', 'detached_at': '', 'volume_id': '6a4d7edc-98d8-414f-9347-350d38aa7ea4', 'serial': '6a4d7edc-98d8-414f-9347-350d38aa7ea4'} {{(pid=63379) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2027.895625] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b5449aa-a0f8-40b4-948d-e1cfef4c4442 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.919407] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34a74183-af4f-4962-a9dd-69b5d6c819c3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.928207] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4445c33-7ad6-47f2-969b-76e42981df7a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.953026] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35fcf2bf-03fc-4c94-add4-712231eeeea2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.970355] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-77d6082d-ce45-460f-9907-722c7f437199 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] The volume has not been displaced from its original location: [datastore1] volume-6a4d7edc-98d8-414f-9347-350d38aa7ea4/volume-6a4d7edc-98d8-414f-9347-350d38aa7ea4.vmdk. No consolidation needed. 
{{(pid=63379) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2027.975857] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-77d6082d-ce45-460f-9907-722c7f437199 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Reconfiguring VM instance instance-0000006c to detach disk 2001 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2027.976536] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-874f222e-13b2-4009-b56a-fc3c8f5f2c66 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.998749] env[63379]: DEBUG oslo_vmware.api [None req-77d6082d-ce45-460f-9907-722c7f437199 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 2027.998749] env[63379]: value = "task-1780470" [ 2027.998749] env[63379]: _type = "Task" [ 2027.998749] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2028.004090] env[63379]: DEBUG nova.objects.instance [None req-896b482f-64a4-41aa-b4c6-e531e9b5fd18 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lazy-loading 'flavor' on Instance uuid a39c5511-3efc-41e9-8902-692f237557e1 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2028.013448] env[63379]: DEBUG oslo_vmware.api [None req-77d6082d-ce45-460f-9907-722c7f437199 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780470, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2028.035225] env[63379]: DEBUG oslo_vmware.api [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780466, 'name': CopyVirtualDisk_Task} progress is 80%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2028.109269] env[63379]: DEBUG oslo_concurrency.lockutils [None req-81224508-d004-4611-ae26-03fc05e6a0bd tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2028.109510] env[63379]: DEBUG oslo_concurrency.lockutils [None req-81224508-d004-4611-ae26-03fc05e6a0bd tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2028.109764] env[63379]: DEBUG nova.objects.instance [None req-81224508-d004-4611-ae26-03fc05e6a0bd tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lazy-loading 'resources' on Instance uuid 20649b93-78ac-4805-aa24-5dbfef9d766b {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2028.509984] env[63379]: DEBUG oslo_vmware.api [None req-77d6082d-ce45-460f-9907-722c7f437199 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780470, 'name': ReconfigVM_Task, 'duration_secs': 0.309064} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2028.511420] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-77d6082d-ce45-460f-9907-722c7f437199 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Reconfigured VM instance instance-0000006c to detach disk 2001 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2028.516303] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ea3656a5-a241-4764-a6a9-aa875be2e634 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.536095] env[63379]: DEBUG oslo_vmware.api [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780466, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.433894} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2028.537272] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3959d3db-eff7-402f-81f5-8f67a00a1f20/3959d3db-eff7-402f-81f5-8f67a00a1f20.vmdk to [datastore1] 10fc842d-b821-4103-b6a5-f5b2fc46ea74/10fc842d-b821-4103-b6a5-f5b2fc46ea74.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2028.537836] env[63379]: DEBUG oslo_vmware.api [None req-77d6082d-ce45-460f-9907-722c7f437199 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 2028.537836] env[63379]: value = "task-1780471" [ 2028.537836] env[63379]: _type = "Task" [ 2028.537836] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2028.538542] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d11bb2c-ba0a-4628-8921-de9f7d7881f6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.566467] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] 10fc842d-b821-4103-b6a5-f5b2fc46ea74/10fc842d-b821-4103-b6a5-f5b2fc46ea74.vmdk or device None with type streamOptimized {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2028.566821] env[63379]: DEBUG oslo_vmware.api [None req-77d6082d-ce45-460f-9907-722c7f437199 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780471, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2028.567389] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a9f38fc5-6a60-448c-b06e-3623e48783f1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.587644] env[63379]: DEBUG oslo_vmware.api [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Waiting for the task: (returnval){ [ 2028.587644] env[63379]: value = "task-1780472" [ 2028.587644] env[63379]: _type = "Task" [ 2028.587644] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2028.595699] env[63379]: DEBUG oslo_vmware.api [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780472, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2028.734841] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8706e4ba-e93c-4337-a886-def5a4f70ed4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.743039] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39a48cfa-1876-4986-9bf0-bb01c0b5a5fc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.774970] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-047c8928-487b-4e61-a7d1-ca934df259c9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.783243] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a9fbef4-4b2b-41ca-b8ea-0e8a13ea30c2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.797559] env[63379]: DEBUG nova.compute.provider_tree [None req-81224508-d004-4611-ae26-03fc05e6a0bd tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2029.017516] env[63379]: DEBUG oslo_concurrency.lockutils [None req-896b482f-64a4-41aa-b4c6-e531e9b5fd18 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "a39c5511-3efc-41e9-8902-692f237557e1" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.257s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2029.051396] env[63379]: DEBUG oslo_vmware.api [None req-77d6082d-ce45-460f-9907-722c7f437199 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780471, 'name': ReconfigVM_Task, 'duration_secs': 0.150889} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2029.051734] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-77d6082d-ce45-460f-9907-722c7f437199 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369514', 'volume_id': '6a4d7edc-98d8-414f-9347-350d38aa7ea4', 'name': 'volume-6a4d7edc-98d8-414f-9347-350d38aa7ea4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': 'df8d513d-c201-4ffe-894e-cf8c3318cecc', 'attached_at': '2024-12-11T23:39:20.000000', 'detached_at': '', 'volume_id': '6a4d7edc-98d8-414f-9347-350d38aa7ea4', 'serial': '6a4d7edc-98d8-414f-9347-350d38aa7ea4'} {{(pid=63379) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2029.052063] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-77d6082d-ce45-460f-9907-722c7f437199 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2029.053041] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4789e911-fc0a-48c3-9637-875c72f3d489 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.060352] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-77d6082d-ce45-460f-9907-722c7f437199 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2029.060583] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-10f03b7f-3c51-4b6f-a11b-77a1d822426d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.097729] env[63379]: DEBUG oslo_vmware.api [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780472, 'name': ReconfigVM_Task, 'duration_secs': 0.291012} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2029.098063] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Reconfigured VM instance instance-0000006d to attach disk [datastore1] 10fc842d-b821-4103-b6a5-f5b2fc46ea74/10fc842d-b821-4103-b6a5-f5b2fc46ea74.vmdk or device None with type streamOptimized {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2029.098732] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d64439d0-6ffd-44a6-950e-0c3e936778f6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.104806] env[63379]: DEBUG oslo_vmware.api [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Waiting for the task: (returnval){ [ 2029.104806] env[63379]: value = "task-1780474" [ 2029.104806] env[63379]: _type = "Task" [ 2029.104806] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2029.112178] env[63379]: DEBUG oslo_vmware.api [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780474, 'name': Rename_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2029.247791] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-77d6082d-ce45-460f-9907-722c7f437199 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2029.248105] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-77d6082d-ce45-460f-9907-722c7f437199 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2029.248388] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-77d6082d-ce45-460f-9907-722c7f437199 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Deleting the datastore file [datastore1] df8d513d-c201-4ffe-894e-cf8c3318cecc {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2029.248688] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-94890177-75a9-4ff9-b6dc-1da9c43086ce {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.255019] env[63379]: DEBUG oslo_vmware.api [None req-77d6082d-ce45-460f-9907-722c7f437199 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 2029.255019] env[63379]: value = "task-1780475" [ 2029.255019] env[63379]: _type = "Task" [ 2029.255019] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2029.263074] env[63379]: DEBUG oslo_vmware.api [None req-77d6082d-ce45-460f-9907-722c7f437199 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780475, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2029.302390] env[63379]: DEBUG nova.scheduler.client.report [None req-81224508-d004-4611-ae26-03fc05e6a0bd tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2029.615922] env[63379]: DEBUG oslo_vmware.api [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780474, 'name': Rename_Task, 'duration_secs': 0.155251} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2029.616251] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2029.616559] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ba2d5289-3d33-4e86-8b2b-d1db878f13a3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.624694] env[63379]: DEBUG oslo_vmware.api [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Waiting for the task: (returnval){ [ 2029.624694] env[63379]: value = "task-1780476" [ 2029.624694] env[63379]: _type = "Task" [ 2029.624694] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2029.634047] env[63379]: DEBUG oslo_vmware.api [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780476, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2029.765915] env[63379]: DEBUG oslo_vmware.api [None req-77d6082d-ce45-460f-9907-722c7f437199 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780475, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.197345} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2029.766200] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-77d6082d-ce45-460f-9907-722c7f437199 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2029.766425] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-77d6082d-ce45-460f-9907-722c7f437199 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2029.766611] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-77d6082d-ce45-460f-9907-722c7f437199 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2029.766793] env[63379]: INFO nova.compute.manager [None req-77d6082d-ce45-460f-9907-722c7f437199 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Took 2.39 seconds to destroy the instance on the hypervisor. [ 2029.767051] env[63379]: DEBUG oslo.service.loopingcall [None req-77d6082d-ce45-460f-9907-722c7f437199 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2029.767258] env[63379]: DEBUG nova.compute.manager [-] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2029.767356] env[63379]: DEBUG nova.network.neutron [-] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2029.806778] env[63379]: DEBUG oslo_concurrency.lockutils [None req-81224508-d004-4611-ae26-03fc05e6a0bd tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.697s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2029.824539] env[63379]: INFO nova.scheduler.client.report [None req-81224508-d004-4611-ae26-03fc05e6a0bd tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Deleted allocations for instance 20649b93-78ac-4805-aa24-5dbfef9d766b [ 2030.024893] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9db977c2-58eb-454b-a1fd-63445f7693c3 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquiring lock "a39c5511-3efc-41e9-8902-692f237557e1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2030.025238] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9db977c2-58eb-454b-a1fd-63445f7693c3 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "a39c5511-3efc-41e9-8902-692f237557e1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2030.025413] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9db977c2-58eb-454b-a1fd-63445f7693c3 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquiring lock "a39c5511-3efc-41e9-8902-692f237557e1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2030.025654] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9db977c2-58eb-454b-a1fd-63445f7693c3 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "a39c5511-3efc-41e9-8902-692f237557e1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2030.025879] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9db977c2-58eb-454b-a1fd-63445f7693c3 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "a39c5511-3efc-41e9-8902-692f237557e1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
2030.028321] env[63379]: INFO nova.compute.manager [None req-9db977c2-58eb-454b-a1fd-63445f7693c3 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Terminating instance [ 2030.030402] env[63379]: DEBUG nova.compute.manager [None req-9db977c2-58eb-454b-a1fd-63445f7693c3 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2030.030616] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9db977c2-58eb-454b-a1fd-63445f7693c3 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2030.031513] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb6e3381-e679-4ac4-94c7-6abd68866903 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.041166] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-9db977c2-58eb-454b-a1fd-63445f7693c3 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2030.041512] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b0e7f730-b5b1-4bad-ba8e-6eb1a5493c30 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.050755] env[63379]: DEBUG oslo_vmware.api [None req-9db977c2-58eb-454b-a1fd-63445f7693c3 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 2030.050755] env[63379]: value = "task-1780477" [ 2030.050755] env[63379]: _type = "Task" [ 2030.050755] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2030.061363] env[63379]: DEBUG oslo_vmware.api [None req-9db977c2-58eb-454b-a1fd-63445f7693c3 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780477, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2030.136439] env[63379]: DEBUG oslo_vmware.api [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780476, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2030.335031] env[63379]: DEBUG oslo_concurrency.lockutils [None req-81224508-d004-4611-ae26-03fc05e6a0bd tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "20649b93-78ac-4805-aa24-5dbfef9d766b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.343s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2030.342997] env[63379]: DEBUG nova.compute.manager [req-c5f5225a-9808-46d6-9d55-9e6671e26e20 req-6cfdab52-908d-493c-9b7b-5a2894e54104 service nova] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Received event network-vif-deleted-43a4d9a7-51c7-4dbd-8864-2e6fbcb7c13e {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2030.342997] env[63379]: INFO nova.compute.manager [req-c5f5225a-9808-46d6-9d55-9e6671e26e20 req-6cfdab52-908d-493c-9b7b-5a2894e54104 service nova] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Neutron deleted interface 43a4d9a7-51c7-4dbd-8864-2e6fbcb7c13e; detaching it from the instance and deleting it from the info cache [ 2030.342997] env[63379]: DEBUG nova.network.neutron [req-c5f5225a-9808-46d6-9d55-9e6671e26e20 req-6cfdab52-908d-493c-9b7b-5a2894e54104 service nova] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2030.562065] env[63379]: DEBUG oslo_vmware.api [None req-9db977c2-58eb-454b-a1fd-63445f7693c3 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780477, 'name': PowerOffVM_Task, 'duration_secs': 0.266073} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2030.562374] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-9db977c2-58eb-454b-a1fd-63445f7693c3 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2030.562715] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9db977c2-58eb-454b-a1fd-63445f7693c3 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2030.562819] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9a3ca72d-e094-46c5-a703-cd35000a9dd9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.636545] env[63379]: DEBUG oslo_vmware.api [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780476, 'name': PowerOnVM_Task, 'duration_secs': 0.802111} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2030.636820] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2030.660872] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9db977c2-58eb-454b-a1fd-63445f7693c3 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2030.661434] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9db977c2-58eb-454b-a1fd-63445f7693c3 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2030.661434] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-9db977c2-58eb-454b-a1fd-63445f7693c3 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Deleting the datastore file [datastore1] a39c5511-3efc-41e9-8902-692f237557e1 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2030.661964] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7efc3f88-2129-4ce7-bd9d-6539034960d1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.670612] env[63379]: DEBUG oslo_vmware.api [None req-9db977c2-58eb-454b-a1fd-63445f7693c3 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for the task: (returnval){ [ 2030.670612] env[63379]: value = "task-1780479" [ 2030.670612] env[63379]: _type = "Task" [ 2030.670612] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2030.679720] env[63379]: DEBUG oslo_vmware.api [None req-9db977c2-58eb-454b-a1fd-63445f7693c3 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780479, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2030.747045] env[63379]: DEBUG nova.compute.manager [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2030.748576] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1258abd0-f768-4b1f-b8ae-1ab07d16faec {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.809048] env[63379]: DEBUG nova.network.neutron [-] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2030.845693] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e835fdc0-57bd-40bc-8ed0-70de57226a6c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.856025] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0ad5ff7-ae70-4ff3-a6d5-e56298bbb2d0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.891056] env[63379]: DEBUG nova.compute.manager [req-c5f5225a-9808-46d6-9d55-9e6671e26e20 req-6cfdab52-908d-493c-9b7b-5a2894e54104 service nova] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Detach interface failed, port_id=43a4d9a7-51c7-4dbd-8864-2e6fbcb7c13e, reason: Instance df8d513d-c201-4ffe-894e-cf8c3318cecc could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 2031.181516] env[63379]: DEBUG oslo_vmware.api [None req-9db977c2-58eb-454b-a1fd-63445f7693c3 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Task: {'id': task-1780479, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.201101} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2031.181780] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-9db977c2-58eb-454b-a1fd-63445f7693c3 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2031.181974] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9db977c2-58eb-454b-a1fd-63445f7693c3 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2031.182174] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9db977c2-58eb-454b-a1fd-63445f7693c3 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2031.182361] env[63379]: INFO nova.compute.manager [None req-9db977c2-58eb-454b-a1fd-63445f7693c3 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Took 1.15 seconds to destroy the instance on the hypervisor. [ 2031.182608] env[63379]: DEBUG oslo.service.loopingcall [None req-9db977c2-58eb-454b-a1fd-63445f7693c3 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2031.182805] env[63379]: DEBUG nova.compute.manager [-] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2031.182899] env[63379]: DEBUG nova.network.neutron [-] [instance: a39c5511-3efc-41e9-8902-692f237557e1] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2031.196821] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "22296faa-10cf-48fe-a777-95d932987cf9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2031.197071] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "22296faa-10cf-48fe-a777-95d932987cf9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2031.267086] env[63379]: DEBUG oslo_concurrency.lockutils [None req-23366ece-3b9b-4f67-939c-ab55ea36dead tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Lock "10fc842d-b821-4103-b6a5-f5b2fc46ea74" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 25.463s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2031.313752] env[63379]: INFO nova.compute.manager [-] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Took 1.55 seconds to deallocate network for instance. [ 2031.699875] env[63379]: DEBUG nova.compute.manager [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2031.863111] env[63379]: INFO nova.compute.manager [None req-77d6082d-ce45-460f-9907-722c7f437199 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Took 0.55 seconds to detach 1 volumes for instance. 
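Editor's note: the "Acquiring lock ... by ..." / "acquired ... waited 0.000s" / "released ... held 25.463s" lines above are produced by oslo.concurrency's lockutils wrappers around compute-manager methods. Below is a minimal sketch of that usage pattern; the lock name and function name are borrowed from the log purely for illustration and this is not Nova's actual code.

    # Minimal sketch of the oslo.concurrency lock pattern behind the
    # "Acquiring lock ... / acquired ... waited / released ... held" lines.
    # Lock name and guarded function are illustrative, not Nova's code.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('22296faa-10cf-48fe-a777-95d932987cf9')
    def _locked_do_build_and_run_instance():
        # Only one thread per lock name runs this body at a time; lockutils
        # logs how long the caller waited and how long the lock was held.
        pass

    # The same primitive is available directly as a context manager:
    with lockutils.lock('compute_resources'):
        pass  # e.g. claim or update resource usage under the lock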
[ 2032.108535] env[63379]: DEBUG nova.network.neutron [-] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2032.227488] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2032.227771] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2032.229474] env[63379]: INFO nova.compute.claims [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2032.370027] env[63379]: DEBUG oslo_concurrency.lockutils [None req-77d6082d-ce45-460f-9907-722c7f437199 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2032.372253] env[63379]: DEBUG nova.compute.manager [req-84065720-0824-4d56-9853-69db406fa316 req-500cd583-ab48-43e6-9624-154877601ba0 service nova] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Received event network-vif-deleted-055e7bb8-6f06-4be0-bf3d-97113330ad89 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2032.611853] env[63379]: INFO nova.compute.manager [-] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Took 1.43 seconds to deallocate network for instance. 
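Editor's note: the DeleteDatastoreFile_Task entries above, and the SuspendVM_Task/PowerOnVM_Task entries that follow, use oslo.vmware's start-then-poll pattern: a vSphere task is kicked off via invoke_api and the session polls it until it completes ("progress is 0% ... 70% ... completed successfully"). A rough sketch of that pattern is shown below; the host, credentials, and vm_ref are placeholders (constructing the session logs into vCenter), so this is an illustration rather than Nova's implementation.

    # Rough sketch of the oslo.vmware "start task, then poll it" pattern.
    # Host/credentials are placeholders; vm_ref would normally come from a
    # PropertyCollector or SearchIndex lookup like the ones logged above.
    from oslo_vmware import api

    def get_session():
        # Creating the session establishes a vCenter login.
        return api.VMwareAPISession('vc.example.test', 'user', 'password',
                                    api_retry_count=10,
                                    task_poll_interval=0.5)

    def suspend_vm(session, vm_ref):
        # Starts the vCenter task, then blocks while the session polls its
        # state until success (or raises on error), as _poll_task does above.
        task = session.invoke_api(session.vim, 'SuspendVM_Task', vm_ref)
        session.wait_for_task(task)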
[ 2032.972027] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b226aea3-69fe-486f-97ee-488ee1544b49 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.980300] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8c2994d9-8a27-4539-8f95-2f96d5437aac tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Suspending the VM {{(pid=63379) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 2032.980628] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-168ee289-f496-4b79-a2c6-8fe87e02a306 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.988594] env[63379]: DEBUG oslo_vmware.api [None req-8c2994d9-8a27-4539-8f95-2f96d5437aac tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Waiting for the task: (returnval){ [ 2032.988594] env[63379]: value = "task-1780480" [ 2032.988594] env[63379]: _type = "Task" [ 2032.988594] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2032.997532] env[63379]: DEBUG oslo_vmware.api [None req-8c2994d9-8a27-4539-8f95-2f96d5437aac tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780480, 'name': SuspendVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2033.118570] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9db977c2-58eb-454b-a1fd-63445f7693c3 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2033.359221] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43032b58-0743-4e8c-b611-81f28c93ac03 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.367340] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea861c36-b6fd-461f-ba7d-08617902e58e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.398179] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da32601b-121e-4482-b793-fc4fd915b82b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.405869] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4998ae84-8b9f-4d9f-8611-e3d1572ca5a0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.420947] env[63379]: DEBUG nova.compute.provider_tree [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2033.500140] env[63379]: DEBUG oslo_vmware.api [None req-8c2994d9-8a27-4539-8f95-2f96d5437aac tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780480, 'name': SuspendVM_Task} progress is 70%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2033.941132] env[63379]: ERROR nova.scheduler.client.report [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [req-d2612bbe-4df3-4999-a476-ec209b4ad84a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID cf478c89-515f-4372-b90f-4868ab56e978. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-d2612bbe-4df3-4999-a476-ec209b4ad84a"}]} [ 2033.956448] env[63379]: DEBUG nova.scheduler.client.report [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Refreshing inventories for resource provider cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2033.969285] env[63379]: DEBUG nova.scheduler.client.report [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Updating ProviderTree inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2033.969535] env[63379]: DEBUG nova.compute.provider_tree [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2033.979249] env[63379]: DEBUG nova.scheduler.client.report [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Refreshing aggregate associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, aggregates: None {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2033.995027] env[63379]: DEBUG nova.scheduler.client.report [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Refreshing trait associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2033.999828] env[63379]: DEBUG oslo_vmware.api [None req-8c2994d9-8a27-4539-8f95-2f96d5437aac tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780480, 'name': SuspendVM_Task, 'duration_secs': 0.623476} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2034.000093] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-8c2994d9-8a27-4539-8f95-2f96d5437aac tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Suspended the VM {{(pid=63379) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 2034.000283] env[63379]: DEBUG nova.compute.manager [None req-8c2994d9-8a27-4539-8f95-2f96d5437aac tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2034.001119] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ec043b1-025b-47cf-b6a3-a686299d4c74 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.099685] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-509762ac-b97c-4e56-9db6-7c060b3894f1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.107902] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6ecb6ef-051e-4ceb-ab46-1fe052cde91c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.137022] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3edada4-fa77-4fc5-a83b-2df9d59081ca {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.144134] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9f799c5-785e-4413-baad-fb99e4fb4cd0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.158459] env[63379]: DEBUG nova.compute.provider_tree [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 
tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2034.686286] env[63379]: DEBUG nova.scheduler.client.report [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Updated inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 with generation 175 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2034.686667] env[63379]: DEBUG nova.compute.provider_tree [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Updating resource provider cf478c89-515f-4372-b90f-4868ab56e978 generation from 175 to 176 during operation: update_inventory {{(pid=63379) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2034.686856] env[63379]: DEBUG nova.compute.provider_tree [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2035.192567] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.965s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2035.193062] env[63379]: DEBUG nova.compute.manager [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] Start building networks asynchronously for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2035.195840] env[63379]: DEBUG oslo_concurrency.lockutils [None req-77d6082d-ce45-460f-9907-722c7f437199 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.826s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2035.196085] env[63379]: DEBUG nova.objects.instance [None req-77d6082d-ce45-460f-9907-722c7f437199 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lazy-loading 'resources' on Instance uuid df8d513d-c201-4ffe-894e-cf8c3318cecc {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2035.358476] env[63379]: INFO nova.compute.manager [None req-49dcb9db-bb31-40fa-95eb-ac4e5591bce9 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Resuming [ 2035.359118] env[63379]: DEBUG nova.objects.instance [None req-49dcb9db-bb31-40fa-95eb-ac4e5591bce9 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Lazy-loading 'flavor' on Instance uuid 10fc842d-b821-4103-b6a5-f5b2fc46ea74 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2035.698988] env[63379]: DEBUG nova.compute.utils [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2035.703072] env[63379]: DEBUG nova.compute.manager [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2035.703072] env[63379]: DEBUG nova.network.neutron [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2035.748884] env[63379]: DEBUG nova.policy [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f2e7c2125f0044508dc4016c4de224e2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9746ae945355479fa5880802e08d2b0a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 2035.812632] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d607bad1-2a50-49c6-a67d-d7eb74077a92 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.821214] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15e3b4f5-3af7-4f47-8cf4-ad016d9a7f4c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.856093] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72f9305d-7e3d-4e77-9211-30dcd2219971 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.867668] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-695523e7-be88-4a0f-a862-711f2bac851e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.882270] env[63379]: DEBUG nova.compute.provider_tree [None req-77d6082d-ce45-460f-9907-722c7f437199 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2036.014249] env[63379]: DEBUG nova.network.neutron [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] Successfully created port: 320b0dff-88f2-4e14-ad0a-778e9489ccce {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2036.206299] env[63379]: DEBUG nova.compute.manager [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] Start building block device mappings for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2036.366033] env[63379]: DEBUG oslo_concurrency.lockutils [None req-49dcb9db-bb31-40fa-95eb-ac4e5591bce9 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Acquiring lock "refresh_cache-10fc842d-b821-4103-b6a5-f5b2fc46ea74" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2036.366033] env[63379]: DEBUG oslo_concurrency.lockutils [None req-49dcb9db-bb31-40fa-95eb-ac4e5591bce9 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Acquired lock "refresh_cache-10fc842d-b821-4103-b6a5-f5b2fc46ea74" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2036.366222] env[63379]: DEBUG nova.network.neutron [None req-49dcb9db-bb31-40fa-95eb-ac4e5591bce9 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2036.384798] env[63379]: DEBUG nova.scheduler.client.report [None req-77d6082d-ce45-460f-9907-722c7f437199 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2036.889543] env[63379]: DEBUG oslo_concurrency.lockutils [None req-77d6082d-ce45-460f-9907-722c7f437199 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.693s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2036.891639] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9db977c2-58eb-454b-a1fd-63445f7693c3 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.773s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2036.891826] env[63379]: DEBUG nova.objects.instance [None req-9db977c2-58eb-454b-a1fd-63445f7693c3 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lazy-loading 'resources' on Instance uuid a39c5511-3efc-41e9-8902-692f237557e1 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2036.913758] env[63379]: INFO nova.scheduler.client.report [None req-77d6082d-ce45-460f-9907-722c7f437199 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Deleted allocations for instance df8d513d-c201-4ffe-894e-cf8c3318cecc [ 2037.216142] env[63379]: DEBUG nova.compute.manager [None 
req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2037.242829] env[63379]: DEBUG nova.virt.hardware [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2037.243096] env[63379]: DEBUG nova.virt.hardware [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2037.243267] env[63379]: DEBUG nova.virt.hardware [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2037.243456] env[63379]: DEBUG nova.virt.hardware [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2037.243611] env[63379]: DEBUG nova.virt.hardware [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2037.243764] env[63379]: DEBUG nova.virt.hardware [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2037.243977] env[63379]: DEBUG nova.virt.hardware [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2037.244189] env[63379]: DEBUG nova.virt.hardware [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2037.244367] env[63379]: DEBUG nova.virt.hardware [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2037.244576] env[63379]: DEBUG nova.virt.hardware [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2037.244802] env[63379]: DEBUG nova.virt.hardware [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2037.245747] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a92f00fc-477b-4f31-b062-666ce87ff230 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.256656] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e074f98-0b65-4d59-8116-9c0485c8e493 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.373064] env[63379]: DEBUG nova.network.neutron [None req-49dcb9db-bb31-40fa-95eb-ac4e5591bce9 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Updating instance_info_cache with network_info: [{"id": "7dc69df5-b3d2-494c-b700-584c31779f9a", "address": "fa:16:3e:b3:a2:43", "network": {"id": "678e0600-c1d0-4fb0-8219-a81a1ca0f4f0", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1244921057-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28f7e38c300546a2a7a033cb12c7f89a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1895250-76cc-41f7-b7f8-2e5679494607", "external-id": "nsx-vlan-transportzone-785", "segmentation_id": 785, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dc69df5-b3", "ovs_interfaceid": "7dc69df5-b3d2-494c-b700-584c31779f9a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2037.421849] env[63379]: DEBUG oslo_concurrency.lockutils [None req-77d6082d-ce45-460f-9907-722c7f437199 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "df8d513d-c201-4ffe-894e-cf8c3318cecc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.055s 
{{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2037.480966] env[63379]: DEBUG nova.compute.manager [req-712311fd-885b-48fb-b168-e3a34153ca0b req-349bfced-da71-44e9-9b77-a708d9991e89 service nova] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] Received event network-vif-plugged-320b0dff-88f2-4e14-ad0a-778e9489ccce {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2037.481220] env[63379]: DEBUG oslo_concurrency.lockutils [req-712311fd-885b-48fb-b168-e3a34153ca0b req-349bfced-da71-44e9-9b77-a708d9991e89 service nova] Acquiring lock "22296faa-10cf-48fe-a777-95d932987cf9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2037.481426] env[63379]: DEBUG oslo_concurrency.lockutils [req-712311fd-885b-48fb-b168-e3a34153ca0b req-349bfced-da71-44e9-9b77-a708d9991e89 service nova] Lock "22296faa-10cf-48fe-a777-95d932987cf9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2037.481597] env[63379]: DEBUG oslo_concurrency.lockutils [req-712311fd-885b-48fb-b168-e3a34153ca0b req-349bfced-da71-44e9-9b77-a708d9991e89 service nova] Lock "22296faa-10cf-48fe-a777-95d932987cf9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2037.481766] env[63379]: DEBUG nova.compute.manager [req-712311fd-885b-48fb-b168-e3a34153ca0b req-349bfced-da71-44e9-9b77-a708d9991e89 service nova] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] No waiting events found dispatching network-vif-plugged-320b0dff-88f2-4e14-ad0a-778e9489ccce {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2037.482018] env[63379]: WARNING nova.compute.manager [req-712311fd-885b-48fb-b168-e3a34153ca0b req-349bfced-da71-44e9-9b77-a708d9991e89 service nova] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] Received unexpected event network-vif-plugged-320b0dff-88f2-4e14-ad0a-778e9489ccce for instance with vm_state building and task_state spawning. 
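Editor's note: the 409 "placement.concurrent_update" error above, followed by "Refreshing inventories ..." and a successful update that bumps the resource-provider generation from 175 to 176, is Placement's optimistic-concurrency loop: every inventory PUT carries the provider generation, and a conflict means another writer got there first, so the client re-reads and retries. A condensed sketch of that loop against the Placement HTTP API is below, using the requests library with a placeholder endpoint and token; it mirrors the VCPU/MEMORY_MB/DISK_GB payloads logged above but is not Nova's report client.

    # Condensed sketch of the optimistic-concurrency retry that produces the
    # 409 "placement.concurrent_update" -> refresh -> retry sequence above.
    # PLACEMENT_URL and the token are placeholders.
    import requests

    PLACEMENT_URL = 'http://placement.example.test/resource_providers'
    HEADERS = {'X-Auth-Token': 'TOKEN',
               'OpenStack-API-Version': 'placement 1.26'}

    def set_inventory(rp_uuid, inventories, attempts=3):
        for _ in range(attempts):
            # Read the provider's current generation.
            rp = requests.get(f'{PLACEMENT_URL}/{rp_uuid}',
                              headers=HEADERS).json()
            payload = {'resource_provider_generation': rp['generation'],
                       'inventories': inventories}
            resp = requests.put(f'{PLACEMENT_URL}/{rp_uuid}/inventories',
                                json=payload, headers=HEADERS)
            if resp.status_code != 409:
                return resp.json()  # success: generation bumped server-side
            # 409 concurrent_update: another writer changed the provider;
            # loop to re-read the generation and try again.
        raise RuntimeError('could not update inventory after retries')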
[ 2037.505392] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-108066f0-1869-48d3-8a1b-2615b685e98d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.514362] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c534594a-a9f6-4daa-8e69-9c7d6a176bba {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.544847] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ab3c93d-2c2f-49b0-87e2-ebdd2de694f7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.552793] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9047dfd5-db08-4faf-81e2-5cbac8169b6a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.565979] env[63379]: DEBUG nova.compute.provider_tree [None req-9db977c2-58eb-454b-a1fd-63445f7693c3 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2037.572931] env[63379]: DEBUG nova.network.neutron [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] Successfully updated port: 320b0dff-88f2-4e14-ad0a-778e9489ccce {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2037.875809] env[63379]: DEBUG oslo_concurrency.lockutils [None req-49dcb9db-bb31-40fa-95eb-ac4e5591bce9 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Releasing lock "refresh_cache-10fc842d-b821-4103-b6a5-f5b2fc46ea74" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2037.876847] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78cd6a1a-a5e8-46d7-b21c-ba92cbe0a939 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.884040] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-49dcb9db-bb31-40fa-95eb-ac4e5591bce9 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Resuming the VM {{(pid=63379) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 2037.884298] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-909d7ce8-1dea-4c09-b25c-62ef28136a9c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.892142] env[63379]: DEBUG oslo_vmware.api [None req-49dcb9db-bb31-40fa-95eb-ac4e5591bce9 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Waiting for the task: (returnval){ [ 2037.892142] env[63379]: value = "task-1780481" [ 2037.892142] env[63379]: _type = "Task" [ 2037.892142] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2037.900431] env[63379]: DEBUG oslo_vmware.api [None req-49dcb9db-bb31-40fa-95eb-ac4e5591bce9 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780481, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2038.069867] env[63379]: DEBUG nova.scheduler.client.report [None req-9db977c2-58eb-454b-a1fd-63445f7693c3 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2038.073874] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "refresh_cache-22296faa-10cf-48fe-a777-95d932987cf9" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2038.074054] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquired lock "refresh_cache-22296faa-10cf-48fe-a777-95d932987cf9" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2038.074163] env[63379]: DEBUG nova.network.neutron [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2038.408436] env[63379]: DEBUG oslo_vmware.api [None req-49dcb9db-bb31-40fa-95eb-ac4e5591bce9 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780481, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2038.574867] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9db977c2-58eb-454b-a1fd-63445f7693c3 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.683s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2038.594041] env[63379]: INFO nova.scheduler.client.report [None req-9db977c2-58eb-454b-a1fd-63445f7693c3 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Deleted allocations for instance a39c5511-3efc-41e9-8902-692f237557e1 [ 2038.612978] env[63379]: DEBUG nova.network.neutron [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2038.746349] env[63379]: DEBUG nova.network.neutron [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] Updating instance_info_cache with network_info: [{"id": "320b0dff-88f2-4e14-ad0a-778e9489ccce", "address": "fa:16:3e:5d:61:87", "network": {"id": "13b14fc1-6384-47ab-b623-f48d1ef0c41e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1646386679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9746ae945355479fa5880802e08d2b0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16c6ea68-9b0e-4ac0-a484-7a9a40533017", "external-id": "nsx-vlan-transportzone-384", "segmentation_id": 384, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap320b0dff-88", "ovs_interfaceid": "320b0dff-88f2-4e14-ad0a-778e9489ccce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2038.843061] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "7f0c426b-1ce3-469f-8ee1-6dd2178f014e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2038.843303] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "7f0c426b-1ce3-469f-8ee1-6dd2178f014e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" 
:: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2038.903469] env[63379]: DEBUG oslo_vmware.api [None req-49dcb9db-bb31-40fa-95eb-ac4e5591bce9 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780481, 'name': PowerOnVM_Task, 'duration_secs': 0.523087} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2038.903744] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-49dcb9db-bb31-40fa-95eb-ac4e5591bce9 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Resumed the VM {{(pid=63379) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 2038.903930] env[63379]: DEBUG nova.compute.manager [None req-49dcb9db-bb31-40fa-95eb-ac4e5591bce9 tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2038.904726] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42240721-601b-4b07-be7d-32590dbe92d4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.103422] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9db977c2-58eb-454b-a1fd-63445f7693c3 tempest-AttachVolumeNegativeTest-580872197 tempest-AttachVolumeNegativeTest-580872197-project-member] Lock "a39c5511-3efc-41e9-8902-692f237557e1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.078s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2039.248770] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Releasing lock "refresh_cache-22296faa-10cf-48fe-a777-95d932987cf9" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2039.249079] env[63379]: DEBUG nova.compute.manager [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] Instance network_info: |[{"id": "320b0dff-88f2-4e14-ad0a-778e9489ccce", "address": "fa:16:3e:5d:61:87", "network": {"id": "13b14fc1-6384-47ab-b623-f48d1ef0c41e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1646386679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9746ae945355479fa5880802e08d2b0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16c6ea68-9b0e-4ac0-a484-7a9a40533017", "external-id": "nsx-vlan-transportzone-384", "segmentation_id": 384, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap320b0dff-88", "ovs_interfaceid": 
"320b0dff-88f2-4e14-ad0a-778e9489ccce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2039.249515] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5d:61:87', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '16c6ea68-9b0e-4ac0-a484-7a9a40533017', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '320b0dff-88f2-4e14-ad0a-778e9489ccce', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2039.257152] env[63379]: DEBUG oslo.service.loopingcall [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2039.257450] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2039.257602] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dd0b6a2a-ac0a-4e79-a1a1-2ffbae705edb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.278834] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2039.278834] env[63379]: value = "task-1780482" [ 2039.278834] env[63379]: _type = "Task" [ 2039.278834] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2039.286912] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780482, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2039.345280] env[63379]: DEBUG nova.compute.manager [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2039.574986] env[63379]: DEBUG nova.compute.manager [req-5a359cc3-86a0-4bd1-812c-5375d7534aa7 req-92e64842-49bd-485e-b77b-8eebbb19fb5b service nova] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] Received event network-changed-320b0dff-88f2-4e14-ad0a-778e9489ccce {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2039.575276] env[63379]: DEBUG nova.compute.manager [req-5a359cc3-86a0-4bd1-812c-5375d7534aa7 req-92e64842-49bd-485e-b77b-8eebbb19fb5b service nova] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] Refreshing instance network info cache due to event network-changed-320b0dff-88f2-4e14-ad0a-778e9489ccce. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 2039.575509] env[63379]: DEBUG oslo_concurrency.lockutils [req-5a359cc3-86a0-4bd1-812c-5375d7534aa7 req-92e64842-49bd-485e-b77b-8eebbb19fb5b service nova] Acquiring lock "refresh_cache-22296faa-10cf-48fe-a777-95d932987cf9" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2039.575710] env[63379]: DEBUG oslo_concurrency.lockutils [req-5a359cc3-86a0-4bd1-812c-5375d7534aa7 req-92e64842-49bd-485e-b77b-8eebbb19fb5b service nova] Acquired lock "refresh_cache-22296faa-10cf-48fe-a777-95d932987cf9" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2039.575917] env[63379]: DEBUG nova.network.neutron [req-5a359cc3-86a0-4bd1-812c-5375d7534aa7 req-92e64842-49bd-485e-b77b-8eebbb19fb5b service nova] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] Refreshing network info cache for port 320b0dff-88f2-4e14-ad0a-778e9489ccce {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2039.790019] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780482, 'name': CreateVM_Task, 'duration_secs': 0.464303} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2039.790019] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2039.790518] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2039.790706] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2039.791176] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2039.791532] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bef6224e-646c-44bf-bca1-a781ef684e83 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.797508] env[63379]: DEBUG oslo_vmware.api [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2039.797508] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52ce7808-2df2-665b-206b-7b25b685964a" [ 2039.797508] env[63379]: _type = "Task" [ 2039.797508] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2039.808537] env[63379]: DEBUG oslo_vmware.api [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52ce7808-2df2-665b-206b-7b25b685964a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2039.867203] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2039.867488] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2039.869128] env[63379]: INFO nova.compute.claims [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2040.310499] env[63379]: DEBUG oslo_vmware.api [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52ce7808-2df2-665b-206b-7b25b685964a, 'name': SearchDatastore_Task, 'duration_secs': 0.012713} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2040.310837] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2040.311131] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2040.311336] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2040.311486] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2040.311733] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2040.312113] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9ba88fb9-09c9-401a-9801-34335020d1b9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.321863] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2040.322065] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2040.322794] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-207ad2a7-f1f1-45ff-9e6c-0e7f6800e300 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.328444] env[63379]: DEBUG oslo_vmware.api [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2040.328444] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52fdab3c-086e-61a3-87d0-f4fa46d52f58" [ 2040.328444] env[63379]: _type = "Task" [ 2040.328444] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2040.336943] env[63379]: DEBUG oslo_vmware.api [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52fdab3c-086e-61a3-87d0-f4fa46d52f58, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2040.392075] env[63379]: DEBUG nova.network.neutron [req-5a359cc3-86a0-4bd1-812c-5375d7534aa7 req-92e64842-49bd-485e-b77b-8eebbb19fb5b service nova] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] Updated VIF entry in instance network info cache for port 320b0dff-88f2-4e14-ad0a-778e9489ccce. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2040.392592] env[63379]: DEBUG nova.network.neutron [req-5a359cc3-86a0-4bd1-812c-5375d7534aa7 req-92e64842-49bd-485e-b77b-8eebbb19fb5b service nova] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] Updating instance_info_cache with network_info: [{"id": "320b0dff-88f2-4e14-ad0a-778e9489ccce", "address": "fa:16:3e:5d:61:87", "network": {"id": "13b14fc1-6384-47ab-b623-f48d1ef0c41e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1646386679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9746ae945355479fa5880802e08d2b0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16c6ea68-9b0e-4ac0-a484-7a9a40533017", "external-id": "nsx-vlan-transportzone-384", "segmentation_id": 384, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap320b0dff-88", "ovs_interfaceid": "320b0dff-88f2-4e14-ad0a-778e9489ccce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2040.840179] env[63379]: DEBUG oslo_vmware.api [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52fdab3c-086e-61a3-87d0-f4fa46d52f58, 'name': SearchDatastore_Task, 'duration_secs': 0.009886} 
completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2040.841021] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c15d9c41-2eff-45bb-af79-c9f58913d282 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.847101] env[63379]: DEBUG oslo_vmware.api [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2040.847101] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]528dc3ae-d4e1-96ab-4bb3-70a01031c0ef" [ 2040.847101] env[63379]: _type = "Task" [ 2040.847101] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2040.855591] env[63379]: DEBUG oslo_vmware.api [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]528dc3ae-d4e1-96ab-4bb3-70a01031c0ef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2040.896494] env[63379]: DEBUG oslo_concurrency.lockutils [req-5a359cc3-86a0-4bd1-812c-5375d7534aa7 req-92e64842-49bd-485e-b77b-8eebbb19fb5b service nova] Releasing lock "refresh_cache-22296faa-10cf-48fe-a777-95d932987cf9" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2040.987327] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-516ff7ba-1c3d-426a-8241-b11bc6af2855 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.995327] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4788a4d2-20d5-4637-835f-8266d7bbf8fb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.025551] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c87ad2e1-7d58-43b6-b322-f723b8113f5f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.033076] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69dbb43a-df20-4a49-bac3-66331321a393 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.047510] env[63379]: DEBUG nova.compute.provider_tree [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2041.357602] env[63379]: DEBUG oslo_vmware.api [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]528dc3ae-d4e1-96ab-4bb3-70a01031c0ef, 'name': SearchDatastore_Task, 'duration_secs': 0.010597} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2041.357887] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2041.358169] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 22296faa-10cf-48fe-a777-95d932987cf9/22296faa-10cf-48fe-a777-95d932987cf9.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2041.358461] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-badaeaa1-f3b8-4d93-9e0c-78c94f4c0cff {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.365618] env[63379]: DEBUG oslo_vmware.api [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2041.365618] env[63379]: value = "task-1780484" [ 2041.365618] env[63379]: _type = "Task" [ 2041.365618] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2041.373886] env[63379]: DEBUG oslo_vmware.api [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780484, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2041.551524] env[63379]: DEBUG nova.scheduler.client.report [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2041.875723] env[63379]: DEBUG oslo_vmware.api [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780484, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.426273} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2041.876121] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 22296faa-10cf-48fe-a777-95d932987cf9/22296faa-10cf-48fe-a777-95d932987cf9.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2041.876368] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2041.876670] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f6e39594-a51b-4dd6-9c96-96b44372a3b8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.885085] env[63379]: DEBUG oslo_vmware.api [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2041.885085] env[63379]: value = "task-1780486" [ 2041.885085] env[63379]: _type = "Task" [ 2041.885085] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2041.894662] env[63379]: DEBUG oslo_vmware.api [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780486, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2042.057045] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.189s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2042.057584] env[63379]: DEBUG nova.compute.manager [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2042.395525] env[63379]: DEBUG oslo_vmware.api [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780486, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.05964} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2042.395817] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2042.396661] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b89614cc-1e6d-4a2f-bce8-d15c83d4a780 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.418197] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] Reconfiguring VM instance instance-00000075 to attach disk [datastore1] 22296faa-10cf-48fe-a777-95d932987cf9/22296faa-10cf-48fe-a777-95d932987cf9.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2042.418465] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e5831963-1329-4a1d-aaa4-afff3c94b8a2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.438091] env[63379]: DEBUG oslo_vmware.api [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2042.438091] env[63379]: value = "task-1780487" [ 2042.438091] env[63379]: _type = "Task" [ 2042.438091] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2042.446114] env[63379]: DEBUG oslo_vmware.api [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780487, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2042.562951] env[63379]: DEBUG nova.compute.utils [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2042.564500] env[63379]: DEBUG nova.compute.manager [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2042.564675] env[63379]: DEBUG nova.network.neutron [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2042.612328] env[63379]: DEBUG nova.policy [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '99f3906f7b7e47a1a81c5c8f38d5b4ea', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '645f0e0a5e1a44d59ca9c85da49bb454', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 2042.883413] env[63379]: DEBUG nova.network.neutron [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Successfully created port: f4822da6-3551-4e0b-937f-55536f9c7342 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2042.948957] env[63379]: DEBUG oslo_vmware.api [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780487, 'name': ReconfigVM_Task, 'duration_secs': 0.27324} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2042.949274] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] Reconfigured VM instance instance-00000075 to attach disk [datastore1] 22296faa-10cf-48fe-a777-95d932987cf9/22296faa-10cf-48fe-a777-95d932987cf9.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2042.949998] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cedbabd7-3f3e-4aaf-8ceb-174cb6cb017d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.956865] env[63379]: DEBUG oslo_vmware.api [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2042.956865] env[63379]: value = "task-1780488" [ 2042.956865] env[63379]: _type = "Task" [ 2042.956865] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2042.966408] env[63379]: DEBUG oslo_vmware.api [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780488, 'name': Rename_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2043.067392] env[63379]: DEBUG nova.compute.manager [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2043.467869] env[63379]: DEBUG oslo_vmware.api [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780488, 'name': Rename_Task, 'duration_secs': 0.149474} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2043.469025] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2043.469025] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a86b63ef-45d3-46d8-abd8-4cc3f6cdd361 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.476586] env[63379]: DEBUG oslo_vmware.api [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2043.476586] env[63379]: value = "task-1780489" [ 2043.476586] env[63379]: _type = "Task" [ 2043.476586] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2043.485602] env[63379]: DEBUG oslo_vmware.api [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780489, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2043.910031] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9e1f4b47-7ff8-4346-be90-8ecd4c21af0d tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Acquiring lock "10fc842d-b821-4103-b6a5-f5b2fc46ea74" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2043.910316] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9e1f4b47-7ff8-4346-be90-8ecd4c21af0d tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Lock "10fc842d-b821-4103-b6a5-f5b2fc46ea74" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2043.910654] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9e1f4b47-7ff8-4346-be90-8ecd4c21af0d tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Acquiring lock "10fc842d-b821-4103-b6a5-f5b2fc46ea74-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2043.910760] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9e1f4b47-7ff8-4346-be90-8ecd4c21af0d tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Lock "10fc842d-b821-4103-b6a5-f5b2fc46ea74-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2043.910919] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9e1f4b47-7ff8-4346-be90-8ecd4c21af0d tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Lock "10fc842d-b821-4103-b6a5-f5b2fc46ea74-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2043.913120] env[63379]: INFO nova.compute.manager [None req-9e1f4b47-7ff8-4346-be90-8ecd4c21af0d tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Terminating instance [ 2043.915644] env[63379]: DEBUG nova.compute.manager [None req-9e1f4b47-7ff8-4346-be90-8ecd4c21af0d tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2043.915644] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9e1f4b47-7ff8-4346-be90-8ecd4c21af0d tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2043.916060] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-944481e7-585a-4289-9757-036263932de7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.926216] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e1f4b47-7ff8-4346-be90-8ecd4c21af0d tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2043.926505] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ee035ba7-da82-4bc1-b325-1f12fde570b3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.935538] env[63379]: DEBUG oslo_vmware.api [None req-9e1f4b47-7ff8-4346-be90-8ecd4c21af0d tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Waiting for the task: (returnval){ [ 2043.935538] env[63379]: value = "task-1780490" [ 2043.935538] env[63379]: _type = "Task" [ 2043.935538] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2043.948276] env[63379]: DEBUG oslo_vmware.api [None req-9e1f4b47-7ff8-4346-be90-8ecd4c21af0d tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780490, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2043.987282] env[63379]: DEBUG oslo_vmware.api [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780489, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2044.078435] env[63379]: DEBUG nova.compute.manager [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2044.109401] env[63379]: DEBUG nova.virt.hardware [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2044.109667] env[63379]: DEBUG nova.virt.hardware [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2044.109887] env[63379]: DEBUG nova.virt.hardware [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2044.110260] env[63379]: DEBUG nova.virt.hardware [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2044.110531] env[63379]: DEBUG nova.virt.hardware [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2044.110748] env[63379]: DEBUG nova.virt.hardware [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2044.111031] env[63379]: DEBUG nova.virt.hardware [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2044.111166] env[63379]: DEBUG nova.virt.hardware [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2044.111355] env[63379]: DEBUG 
nova.virt.hardware [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2044.111517] env[63379]: DEBUG nova.virt.hardware [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2044.111700] env[63379]: DEBUG nova.virt.hardware [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2044.112605] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ee98cf8-7d38-4e3c-a4a6-f597a4ce2fe2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.121292] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b742e2c9-fab2-4c9e-b364-e47ccb578e5c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.320131] env[63379]: DEBUG nova.compute.manager [req-1aa8d34a-0d84-4a2b-a1af-e88e5590ebcd req-f5de158e-1ecb-418d-bcc9-7be9c2c776ef service nova] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Received event network-vif-plugged-f4822da6-3551-4e0b-937f-55536f9c7342 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2044.320392] env[63379]: DEBUG oslo_concurrency.lockutils [req-1aa8d34a-0d84-4a2b-a1af-e88e5590ebcd req-f5de158e-1ecb-418d-bcc9-7be9c2c776ef service nova] Acquiring lock "7f0c426b-1ce3-469f-8ee1-6dd2178f014e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2044.320611] env[63379]: DEBUG oslo_concurrency.lockutils [req-1aa8d34a-0d84-4a2b-a1af-e88e5590ebcd req-f5de158e-1ecb-418d-bcc9-7be9c2c776ef service nova] Lock "7f0c426b-1ce3-469f-8ee1-6dd2178f014e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2044.320815] env[63379]: DEBUG oslo_concurrency.lockutils [req-1aa8d34a-0d84-4a2b-a1af-e88e5590ebcd req-f5de158e-1ecb-418d-bcc9-7be9c2c776ef service nova] Lock "7f0c426b-1ce3-469f-8ee1-6dd2178f014e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2044.321010] env[63379]: DEBUG nova.compute.manager [req-1aa8d34a-0d84-4a2b-a1af-e88e5590ebcd req-f5de158e-1ecb-418d-bcc9-7be9c2c776ef service nova] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] No waiting events found dispatching network-vif-plugged-f4822da6-3551-4e0b-937f-55536f9c7342 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2044.321385] env[63379]: WARNING nova.compute.manager 
[req-1aa8d34a-0d84-4a2b-a1af-e88e5590ebcd req-f5de158e-1ecb-418d-bcc9-7be9c2c776ef service nova] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Received unexpected event network-vif-plugged-f4822da6-3551-4e0b-937f-55536f9c7342 for instance with vm_state building and task_state spawning. [ 2044.402434] env[63379]: DEBUG nova.network.neutron [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Successfully updated port: f4822da6-3551-4e0b-937f-55536f9c7342 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2044.447192] env[63379]: DEBUG oslo_vmware.api [None req-9e1f4b47-7ff8-4346-be90-8ecd4c21af0d tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780490, 'name': PowerOffVM_Task, 'duration_secs': 0.203776} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2044.447453] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e1f4b47-7ff8-4346-be90-8ecd4c21af0d tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2044.447625] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9e1f4b47-7ff8-4346-be90-8ecd4c21af0d tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2044.447874] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0aafa634-a195-4dde-952c-c6ab69628b02 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.487537] env[63379]: DEBUG oslo_vmware.api [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780489, 'name': PowerOnVM_Task, 'duration_secs': 0.513491} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2044.487871] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2044.488190] env[63379]: INFO nova.compute.manager [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] Took 7.27 seconds to spawn the instance on the hypervisor. 
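Every vCenter call in the spawn trace above (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) goes through the same wait_for_task/_poll_task pattern: submit the task, then poll its state until it reports success or error. The sketch below is a minimal, hypothetical reconstruction of that control flow only; TaskInfo and the get_task_info callable are stand-ins for the real oslo.vmware/vSphere task objects, not the library's actual API.

# Minimal sketch of the polling loop behind the "progress is 0% ...
# completed successfully" lines above. All names here are illustrative
# stand-ins; only the control flow mirrors the log.
import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    state: str              # 'running', 'success' or 'error'
    progress: int = 0
    error: str | None = None

def wait_for_task(get_task_info, poll_interval: float = 0.5,
                  timeout: float = 300.0) -> TaskInfo:
    """Poll a task until it reaches a terminal state."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()          # one poll == one "_poll_task" log line
        if info.state == "success":
            return info
        if info.state == "error":
            raise RuntimeError(f"task failed: {info.error}")
        time.sleep(poll_interval)       # the trace shows ~0.5s between polls
    raise TimeoutError("task did not complete in time")

# Usage: a callable that reports 'running' twice and then 'success'
# reproduces the progress-then-completed sequence seen for task-1780482.
states = iter(["running", "running", "success"])
result = wait_for_task(lambda: TaskInfo(state=next(states)), poll_interval=0.01)

The point the timestamps make is that each task is short (0.05s to 0.5s here), so the overall 7.27-second spawn is dominated by the number of sequential round trips rather than any single vCenter operation.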
[ 2044.488393] env[63379]: DEBUG nova.compute.manager [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2044.489182] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9134613c-41c2-43fa-ab42-7d6adee0a192 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.615885] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9e1f4b47-7ff8-4346-be90-8ecd4c21af0d tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2044.616151] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9e1f4b47-7ff8-4346-be90-8ecd4c21af0d tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2044.616432] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e1f4b47-7ff8-4346-be90-8ecd4c21af0d tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Deleting the datastore file [datastore1] 10fc842d-b821-4103-b6a5-f5b2fc46ea74 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2044.616749] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9de66fb7-cadc-4d3d-b716-0f3ab37fc437 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.627631] env[63379]: DEBUG oslo_vmware.api [None req-9e1f4b47-7ff8-4346-be90-8ecd4c21af0d tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Waiting for the task: (returnval){ [ 2044.627631] env[63379]: value = "task-1780492" [ 2044.627631] env[63379]: _type = "Task" [ 2044.627631] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2044.635994] env[63379]: DEBUG oslo_vmware.api [None req-9e1f4b47-7ff8-4346-be90-8ecd4c21af0d tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780492, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2044.904890] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "refresh_cache-7f0c426b-1ce3-469f-8ee1-6dd2178f014e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2044.904890] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquired lock "refresh_cache-7f0c426b-1ce3-469f-8ee1-6dd2178f014e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2044.905180] env[63379]: DEBUG nova.network.neutron [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2045.009973] env[63379]: INFO nova.compute.manager [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] Took 12.80 seconds to build instance. [ 2045.139039] env[63379]: DEBUG oslo_vmware.api [None req-9e1f4b47-7ff8-4346-be90-8ecd4c21af0d tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Task: {'id': task-1780492, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.413779} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2045.139039] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e1f4b47-7ff8-4346-be90-8ecd4c21af0d tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2045.139220] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9e1f4b47-7ff8-4346-be90-8ecd4c21af0d tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2045.139752] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9e1f4b47-7ff8-4346-be90-8ecd4c21af0d tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2045.139752] env[63379]: INFO nova.compute.manager [None req-9e1f4b47-7ff8-4346-be90-8ecd4c21af0d tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Took 1.22 seconds to destroy the instance on the hypervisor. 
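The teardown of instance 10fc842d-b821-4103-b6a5-f5b2fc46ea74 above shows the serialized destroy path: the per-instance do_terminate_instance lock is taken, the VM is powered off, unregistered, its datastore directory is deleted, and only then is the network deallocated. The following is a minimal sketch of that ordering under stated assumptions: every helper (power_off_vm, unregister_vm, delete_datastore_dir, deallocate_network) is a hypothetical stub standing in for the real driver and Neutron calls, and the threading.Lock only mimics the oslo_concurrency "acquired"/"released" messages in the log.

# Minimal sketch of the destroy ordering recorded above. Stubs print the
# vSphere operation they stand in for; none of this is the real Nova code.
import threading
from collections import defaultdict
from contextlib import contextmanager

_instance_locks: dict[str, threading.Lock] = defaultdict(threading.Lock)

@contextmanager
def instance_lock(instance_uuid: str):
    # Mirrors the 'Lock "<uuid>" acquired ... waited' / '"released" ... held'
    # pairs emitted by oslo_concurrency.lockutils in the trace.
    with _instance_locks[instance_uuid]:
        yield

def power_off_vm(uuid: str) -> None:
    print(f"PowerOffVM_Task({uuid})")

def unregister_vm(uuid: str) -> None:
    print(f"UnregisterVM({uuid})")

def delete_datastore_dir(uuid: str) -> None:
    print(f"DeleteDatastoreFile_Task([datastore1] {uuid})")

def deallocate_network(uuid: str) -> None:
    print(f"deallocate_for_instance({uuid})")

def terminate_instance(instance_uuid: str) -> None:
    # Ordering taken from the trace: power off, unregister, delete the
    # instance directory on the datastore, then release the Neutron ports.
    with instance_lock(instance_uuid):
        power_off_vm(instance_uuid)
        unregister_vm(instance_uuid)
        delete_datastore_dir(instance_uuid)
        deallocate_network(instance_uuid)

terminate_instance("10fc842d-b821-4103-b6a5-f5b2fc46ea74")

The design point the log makes explicit is that network deallocation starts only after the hypervisor-side destroy has finished ("Took 1.22 seconds to destroy the instance on the hypervisor"), inside the lock taken at the start of the terminate flow.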
[ 2045.139965] env[63379]: DEBUG oslo.service.loopingcall [None req-9e1f4b47-7ff8-4346-be90-8ecd4c21af0d tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2045.140198] env[63379]: DEBUG nova.compute.manager [-] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2045.140302] env[63379]: DEBUG nova.network.neutron [-] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2045.475339] env[63379]: DEBUG nova.network.neutron [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2045.512244] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7eda470b-495a-4dbf-8e4b-6f3cc1c03405 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "22296faa-10cf-48fe-a777-95d932987cf9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.315s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2045.611756] env[63379]: DEBUG nova.network.neutron [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Updating instance_info_cache with network_info: [{"id": "f4822da6-3551-4e0b-937f-55536f9c7342", "address": "fa:16:3e:69:f3:f8", "network": {"id": "0dd98be0-5b25-4e45-ac38-4b8d3cd9fc6c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-191573180-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "645f0e0a5e1a44d59ca9c85da49bb454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4822da6-35", "ovs_interfaceid": "f4822da6-3551-4e0b-937f-55536f9c7342", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2045.858293] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5a9d9b93-a116-4f30-b7cf-924ffc6e4a66 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "22296faa-10cf-48fe-a777-95d932987cf9" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2045.858710] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5a9d9b93-a116-4f30-b7cf-924ffc6e4a66 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "22296faa-10cf-48fe-a777-95d932987cf9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2045.858970] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5a9d9b93-a116-4f30-b7cf-924ffc6e4a66 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "22296faa-10cf-48fe-a777-95d932987cf9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2045.859222] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5a9d9b93-a116-4f30-b7cf-924ffc6e4a66 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "22296faa-10cf-48fe-a777-95d932987cf9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2045.859448] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5a9d9b93-a116-4f30-b7cf-924ffc6e4a66 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "22296faa-10cf-48fe-a777-95d932987cf9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2045.862469] env[63379]: INFO nova.compute.manager [None req-5a9d9b93-a116-4f30-b7cf-924ffc6e4a66 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] Terminating instance [ 2045.864305] env[63379]: DEBUG nova.compute.manager [None req-5a9d9b93-a116-4f30-b7cf-924ffc6e4a66 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2045.864501] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-5a9d9b93-a116-4f30-b7cf-924ffc6e4a66 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2045.865348] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76b4a94b-4dd7-4f34-9f65-e8f6e532a5e7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.873666] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a9d9b93-a116-4f30-b7cf-924ffc6e4a66 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2045.873909] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6f4c84db-7e3d-43a8-978f-27cd51469ca5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.882424] env[63379]: DEBUG oslo_vmware.api [None req-5a9d9b93-a116-4f30-b7cf-924ffc6e4a66 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2045.882424] env[63379]: value = "task-1780494" [ 2045.882424] env[63379]: _type = "Task" [ 2045.882424] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2045.891817] env[63379]: DEBUG oslo_vmware.api [None req-5a9d9b93-a116-4f30-b7cf-924ffc6e4a66 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780494, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2045.983373] env[63379]: DEBUG nova.network.neutron [-] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2046.113887] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Releasing lock "refresh_cache-7f0c426b-1ce3-469f-8ee1-6dd2178f014e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2046.114263] env[63379]: DEBUG nova.compute.manager [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Instance network_info: |[{"id": "f4822da6-3551-4e0b-937f-55536f9c7342", "address": "fa:16:3e:69:f3:f8", "network": {"id": "0dd98be0-5b25-4e45-ac38-4b8d3cd9fc6c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-191573180-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "645f0e0a5e1a44d59ca9c85da49bb454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4822da6-35", "ovs_interfaceid": "f4822da6-3551-4e0b-937f-55536f9c7342", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2046.114852] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:69:f3:f8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f4822da6-3551-4e0b-937f-55536f9c7342', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2046.122631] env[63379]: DEBUG oslo.service.loopingcall [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2046.122888] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2046.123142] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5241cab6-9b7e-4af6-ab4a-916058e867b9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.144580] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2046.144580] env[63379]: value = "task-1780495" [ 2046.144580] env[63379]: _type = "Task" [ 2046.144580] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2046.152748] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780495, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2046.353020] env[63379]: DEBUG nova.compute.manager [req-5a9e3fa0-4323-422a-823f-d82e265750bf req-3429ef0a-f060-4f0f-8014-2c02068bcb5f service nova] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Received event network-changed-f4822da6-3551-4e0b-937f-55536f9c7342 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2046.353255] env[63379]: DEBUG nova.compute.manager [req-5a9e3fa0-4323-422a-823f-d82e265750bf req-3429ef0a-f060-4f0f-8014-2c02068bcb5f service nova] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Refreshing instance network info cache due to event network-changed-f4822da6-3551-4e0b-937f-55536f9c7342. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 2046.353477] env[63379]: DEBUG oslo_concurrency.lockutils [req-5a9e3fa0-4323-422a-823f-d82e265750bf req-3429ef0a-f060-4f0f-8014-2c02068bcb5f service nova] Acquiring lock "refresh_cache-7f0c426b-1ce3-469f-8ee1-6dd2178f014e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2046.353632] env[63379]: DEBUG oslo_concurrency.lockutils [req-5a9e3fa0-4323-422a-823f-d82e265750bf req-3429ef0a-f060-4f0f-8014-2c02068bcb5f service nova] Acquired lock "refresh_cache-7f0c426b-1ce3-469f-8ee1-6dd2178f014e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2046.353797] env[63379]: DEBUG nova.network.neutron [req-5a9e3fa0-4323-422a-823f-d82e265750bf req-3429ef0a-f060-4f0f-8014-2c02068bcb5f service nova] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Refreshing network info cache for port f4822da6-3551-4e0b-937f-55536f9c7342 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2046.394469] env[63379]: DEBUG oslo_vmware.api [None req-5a9d9b93-a116-4f30-b7cf-924ffc6e4a66 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780494, 'name': PowerOffVM_Task, 'duration_secs': 0.197023} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2046.394839] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a9d9b93-a116-4f30-b7cf-924ffc6e4a66 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2046.395136] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-5a9d9b93-a116-4f30-b7cf-924ffc6e4a66 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2046.395486] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3510ac11-2b11-41ee-807f-2f849617d95c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.481637] env[63379]: DEBUG oslo_concurrency.lockutils [None req-49d1fde5-1a5b-4a90-8d06-e95cefc0d181 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "8b9f070e-11d3-4e2d-a0ce-54bb939a36ff" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2046.481925] env[63379]: DEBUG oslo_concurrency.lockutils [None req-49d1fde5-1a5b-4a90-8d06-e95cefc0d181 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "8b9f070e-11d3-4e2d-a0ce-54bb939a36ff" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2046.482166] env[63379]: DEBUG oslo_concurrency.lockutils [None req-49d1fde5-1a5b-4a90-8d06-e95cefc0d181 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "8b9f070e-11d3-4e2d-a0ce-54bb939a36ff-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2046.482363] env[63379]: DEBUG oslo_concurrency.lockutils [None req-49d1fde5-1a5b-4a90-8d06-e95cefc0d181 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "8b9f070e-11d3-4e2d-a0ce-54bb939a36ff-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2046.482539] env[63379]: DEBUG oslo_concurrency.lockutils [None req-49d1fde5-1a5b-4a90-8d06-e95cefc0d181 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "8b9f070e-11d3-4e2d-a0ce-54bb939a36ff-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2046.484757] env[63379]: INFO nova.compute.manager [None req-49d1fde5-1a5b-4a90-8d06-e95cefc0d181 tempest-ServerActionsTestOtherA-156498572 
tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Terminating instance [ 2046.486429] env[63379]: INFO nova.compute.manager [-] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Took 1.35 seconds to deallocate network for instance. [ 2046.487156] env[63379]: DEBUG nova.compute.manager [None req-49d1fde5-1a5b-4a90-8d06-e95cefc0d181 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2046.487377] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-49d1fde5-1a5b-4a90-8d06-e95cefc0d181 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2046.488685] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-16456eed-7e96-45e6-aa00-7dd7d6b838ef {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.499047] env[63379]: DEBUG oslo_vmware.api [None req-49d1fde5-1a5b-4a90-8d06-e95cefc0d181 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 2046.499047] env[63379]: value = "task-1780497" [ 2046.499047] env[63379]: _type = "Task" [ 2046.499047] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2046.508700] env[63379]: DEBUG oslo_vmware.api [None req-49d1fde5-1a5b-4a90-8d06-e95cefc0d181 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780497, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2046.564357] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-5a9d9b93-a116-4f30-b7cf-924ffc6e4a66 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2046.564618] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-5a9d9b93-a116-4f30-b7cf-924ffc6e4a66 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2046.564896] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a9d9b93-a116-4f30-b7cf-924ffc6e4a66 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Deleting the datastore file [datastore1] 22296faa-10cf-48fe-a777-95d932987cf9 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2046.565223] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-94d6f3d0-5e58-49e6-97a7-032d5614dc90 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.575201] env[63379]: DEBUG oslo_vmware.api [None req-5a9d9b93-a116-4f30-b7cf-924ffc6e4a66 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2046.575201] env[63379]: value = "task-1780498" [ 2046.575201] env[63379]: _type = "Task" [ 2046.575201] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2046.584076] env[63379]: DEBUG oslo_vmware.api [None req-5a9d9b93-a116-4f30-b7cf-924ffc6e4a66 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780498, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2046.655494] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780495, 'name': CreateVM_Task, 'duration_secs': 0.446886} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2046.655671] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2046.656494] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2046.656644] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2046.656927] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2046.657211] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8487cf03-9e82-4bc3-8041-8a451033486d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.663318] env[63379]: DEBUG oslo_vmware.api [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 2046.663318] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e61124-ff05-d747-b299-9b92f38f65d1" [ 2046.663318] env[63379]: _type = "Task" [ 2046.663318] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2046.673026] env[63379]: DEBUG oslo_vmware.api [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e61124-ff05-d747-b299-9b92f38f65d1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2046.994536] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9e1f4b47-7ff8-4346-be90-8ecd4c21af0d tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2046.994804] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9e1f4b47-7ff8-4346-be90-8ecd4c21af0d tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2046.995037] env[63379]: DEBUG nova.objects.instance [None req-9e1f4b47-7ff8-4346-be90-8ecd4c21af0d tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Lazy-loading 'resources' on Instance uuid 10fc842d-b821-4103-b6a5-f5b2fc46ea74 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2047.008997] env[63379]: DEBUG oslo_vmware.api [None req-49d1fde5-1a5b-4a90-8d06-e95cefc0d181 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780497, 'name': PowerOffVM_Task, 'duration_secs': 0.188001} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2047.009285] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-49d1fde5-1a5b-4a90-8d06-e95cefc0d181 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2047.009488] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-49d1fde5-1a5b-4a90-8d06-e95cefc0d181 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Volume detach. 
Driver type: vmdk {{(pid=63379) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2047.009690] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-49d1fde5-1a5b-4a90-8d06-e95cefc0d181 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369511', 'volume_id': 'eeee8c0e-4fab-40d1-86c6-51050b04b159', 'name': 'volume-eeee8c0e-4fab-40d1-86c6-51050b04b159', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': '8b9f070e-11d3-4e2d-a0ce-54bb939a36ff', 'attached_at': '2024-12-11T23:39:08.000000', 'detached_at': '', 'volume_id': 'eeee8c0e-4fab-40d1-86c6-51050b04b159', 'serial': 'eeee8c0e-4fab-40d1-86c6-51050b04b159'} {{(pid=63379) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2047.010461] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff0eeb6c-523a-4eb5-ae88-6ca6f63151e8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.032895] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aade08ac-5cbd-4f7f-88bc-f6f0f584c9a9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.040668] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4802c4bd-c8c2-4e36-b643-414fbe0d8128 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.059662] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c88dc05-3c4b-4090-a993-7cf1a9db8dda {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.075509] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-49d1fde5-1a5b-4a90-8d06-e95cefc0d181 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] The volume has not been displaced from its original location: [datastore1] volume-eeee8c0e-4fab-40d1-86c6-51050b04b159/volume-eeee8c0e-4fab-40d1-86c6-51050b04b159.vmdk. No consolidation needed. {{(pid=63379) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2047.080833] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-49d1fde5-1a5b-4a90-8d06-e95cefc0d181 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Reconfiguring VM instance instance-00000071 to detach disk 2000 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2047.081703] env[63379]: DEBUG nova.network.neutron [req-5a9e3fa0-4323-422a-823f-d82e265750bf req-3429ef0a-f060-4f0f-8014-2c02068bcb5f service nova] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Updated VIF entry in instance network info cache for port f4822da6-3551-4e0b-937f-55536f9c7342. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2047.082060] env[63379]: DEBUG nova.network.neutron [req-5a9e3fa0-4323-422a-823f-d82e265750bf req-3429ef0a-f060-4f0f-8014-2c02068bcb5f service nova] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Updating instance_info_cache with network_info: [{"id": "f4822da6-3551-4e0b-937f-55536f9c7342", "address": "fa:16:3e:69:f3:f8", "network": {"id": "0dd98be0-5b25-4e45-ac38-4b8d3cd9fc6c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-191573180-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "645f0e0a5e1a44d59ca9c85da49bb454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4822da6-35", "ovs_interfaceid": "f4822da6-3551-4e0b-937f-55536f9c7342", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2047.083189] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-59186a38-5604-4e55-8034-ba9b6b16e99b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.105694] env[63379]: DEBUG oslo_vmware.api [None req-5a9d9b93-a116-4f30-b7cf-924ffc6e4a66 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780498, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150575} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2047.106929] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a9d9b93-a116-4f30-b7cf-924ffc6e4a66 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2047.107150] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-5a9d9b93-a116-4f30-b7cf-924ffc6e4a66 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2047.107339] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-5a9d9b93-a116-4f30-b7cf-924ffc6e4a66 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2047.107514] env[63379]: INFO nova.compute.manager [None req-5a9d9b93-a116-4f30-b7cf-924ffc6e4a66 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] Took 1.24 seconds to destroy the instance on the hypervisor. [ 2047.107757] env[63379]: DEBUG oslo.service.loopingcall [None req-5a9d9b93-a116-4f30-b7cf-924ffc6e4a66 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2047.108325] env[63379]: DEBUG oslo_vmware.api [None req-49d1fde5-1a5b-4a90-8d06-e95cefc0d181 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 2047.108325] env[63379]: value = "task-1780499" [ 2047.108325] env[63379]: _type = "Task" [ 2047.108325] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2047.108591] env[63379]: DEBUG nova.compute.manager [-] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2047.108631] env[63379]: DEBUG nova.network.neutron [-] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2047.119214] env[63379]: DEBUG oslo_vmware.api [None req-49d1fde5-1a5b-4a90-8d06-e95cefc0d181 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780499, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2047.174053] env[63379]: DEBUG oslo_vmware.api [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e61124-ff05-d747-b299-9b92f38f65d1, 'name': SearchDatastore_Task, 'duration_secs': 0.017626} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2047.174311] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2047.174667] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2047.174944] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2047.175108] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2047.175306] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2047.175631] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f69fd07d-1b1f-44fb-9cde-6b4737d1f223 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.188324] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2047.188568] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2047.189362] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ffdb6b2-f0e7-4373-8c5c-07edb3e0ba20 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.195516] env[63379]: DEBUG oslo_vmware.api [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 2047.195516] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52977698-5ac0-8395-95ac-21918d0de3ab" [ 2047.195516] env[63379]: _type = "Task" [ 2047.195516] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2047.203184] env[63379]: DEBUG oslo_vmware.api [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52977698-5ac0-8395-95ac-21918d0de3ab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2047.596606] env[63379]: DEBUG oslo_concurrency.lockutils [req-5a9e3fa0-4323-422a-823f-d82e265750bf req-3429ef0a-f060-4f0f-8014-2c02068bcb5f service nova] Releasing lock "refresh_cache-7f0c426b-1ce3-469f-8ee1-6dd2178f014e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2047.596884] env[63379]: DEBUG nova.compute.manager [req-5a9e3fa0-4323-422a-823f-d82e265750bf req-3429ef0a-f060-4f0f-8014-2c02068bcb5f service nova] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Received event network-vif-deleted-7dc69df5-b3d2-494c-b700-584c31779f9a {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2047.608627] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66b1c14f-7d61-4d9b-a019-e36b5c83874c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.621901] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85705a9e-9581-4dd9-b131-ed0e5ba2ce14 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.624775] env[63379]: DEBUG oslo_vmware.api [None req-49d1fde5-1a5b-4a90-8d06-e95cefc0d181 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780499, 'name': ReconfigVM_Task, 'duration_secs': 0.171452} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2047.625040] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-49d1fde5-1a5b-4a90-8d06-e95cefc0d181 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Reconfigured VM instance instance-00000071 to detach disk 2000 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2047.629879] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5a99f3e2-1c8d-40d0-80f8-509a2c428990 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.665733] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c106389a-d2ab-4789-820f-1ef038e77263 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.674681] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03276e59-91a1-453c-b4ca-54c494969104 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.678696] env[63379]: DEBUG oslo_vmware.api [None req-49d1fde5-1a5b-4a90-8d06-e95cefc0d181 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 2047.678696] env[63379]: value = "task-1780500" [ 2047.678696] env[63379]: _type = "Task" [ 2047.678696] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2047.689471] env[63379]: DEBUG nova.compute.provider_tree [None req-9e1f4b47-7ff8-4346-be90-8ecd4c21af0d tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2047.696709] env[63379]: DEBUG oslo_vmware.api [None req-49d1fde5-1a5b-4a90-8d06-e95cefc0d181 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780500, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2047.706620] env[63379]: DEBUG oslo_vmware.api [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52977698-5ac0-8395-95ac-21918d0de3ab, 'name': SearchDatastore_Task, 'duration_secs': 0.008602} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2047.707414] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23c6c48d-29a5-4b6e-90ec-59767aed9bfc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.712775] env[63379]: DEBUG oslo_vmware.api [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 2047.712775] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52dc21df-465f-f1ba-ac0e-0eb547f8235b" [ 2047.712775] env[63379]: _type = "Task" [ 2047.712775] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2047.721472] env[63379]: DEBUG oslo_vmware.api [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52dc21df-465f-f1ba-ac0e-0eb547f8235b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2047.887447] env[63379]: DEBUG nova.network.neutron [-] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2048.190971] env[63379]: DEBUG oslo_vmware.api [None req-49d1fde5-1a5b-4a90-8d06-e95cefc0d181 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780500, 'name': ReconfigVM_Task, 'duration_secs': 0.157377} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2048.191340] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-49d1fde5-1a5b-4a90-8d06-e95cefc0d181 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369511', 'volume_id': 'eeee8c0e-4fab-40d1-86c6-51050b04b159', 'name': 'volume-eeee8c0e-4fab-40d1-86c6-51050b04b159', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': '8b9f070e-11d3-4e2d-a0ce-54bb939a36ff', 'attached_at': '2024-12-11T23:39:08.000000', 'detached_at': '', 'volume_id': 'eeee8c0e-4fab-40d1-86c6-51050b04b159', 'serial': 'eeee8c0e-4fab-40d1-86c6-51050b04b159'} {{(pid=63379) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2048.191647] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-49d1fde5-1a5b-4a90-8d06-e95cefc0d181 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2048.194858] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8981ed29-039c-41aa-8a9c-c6d298923222 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.202359] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-49d1fde5-1a5b-4a90-8d06-e95cefc0d181 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2048.202628] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-315eb5fd-779c-4705-8d3b-bef193f1f723 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.212243] env[63379]: ERROR nova.scheduler.client.report [None req-9e1f4b47-7ff8-4346-be90-8ecd4c21af0d tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] [req-cc3b935e-8f2e-4864-b110-7895b40ab123] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID cf478c89-515f-4372-b90f-4868ab56e978. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-cc3b935e-8f2e-4864-b110-7895b40ab123"}]} [ 2048.222799] env[63379]: DEBUG oslo_vmware.api [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52dc21df-465f-f1ba-ac0e-0eb547f8235b, 'name': SearchDatastore_Task, 'duration_secs': 0.010131} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2048.223080] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2048.223372] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 7f0c426b-1ce3-469f-8ee1-6dd2178f014e/7f0c426b-1ce3-469f-8ee1-6dd2178f014e.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2048.223690] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d6ce52f9-4a67-47ad-8d38-7ab413502f31 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.227709] env[63379]: DEBUG nova.scheduler.client.report [None req-9e1f4b47-7ff8-4346-be90-8ecd4c21af0d tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Refreshing inventories for resource provider cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2048.230662] env[63379]: DEBUG oslo_vmware.api [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 2048.230662] env[63379]: value = "task-1780502" [ 2048.230662] env[63379]: _type = "Task" [ 2048.230662] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2048.239746] env[63379]: DEBUG oslo_vmware.api [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780502, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2048.241597] env[63379]: DEBUG nova.scheduler.client.report [None req-9e1f4b47-7ff8-4346-be90-8ecd4c21af0d tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Updating ProviderTree inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2048.241809] env[63379]: DEBUG nova.compute.provider_tree [None req-9e1f4b47-7ff8-4346-be90-8ecd4c21af0d tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2048.255747] env[63379]: DEBUG nova.scheduler.client.report [None req-9e1f4b47-7ff8-4346-be90-8ecd4c21af0d tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Refreshing aggregate associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, aggregates: None {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2048.273697] env[63379]: DEBUG nova.scheduler.client.report [None req-9e1f4b47-7ff8-4346-be90-8ecd4c21af0d tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Refreshing trait associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2048.375259] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de3d7754-a78b-4a4f-9507-81aaec0a9b50 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.381351] env[63379]: DEBUG nova.compute.manager [req-ab84ef0b-d304-42aa-a61c-f565f8908bdd req-de296a70-c7a2-4075-b9fb-3093f3c50d59 service nova] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] Received event network-vif-deleted-320b0dff-88f2-4e14-ad0a-778e9489ccce {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2048.387490] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e6cac00-4770-4ad5-bf92-8886fcff9a14 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.390963] env[63379]: INFO nova.compute.manager [-] [instance: 
22296faa-10cf-48fe-a777-95d932987cf9] Took 1.28 seconds to deallocate network for instance. [ 2048.420994] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-541646b4-29d4-4c92-8d24-8951b3062755 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.429364] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9add3fc6-f608-4ae1-a1c9-841f6a425f44 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.444556] env[63379]: DEBUG nova.compute.provider_tree [None req-9e1f4b47-7ff8-4346-be90-8ecd4c21af0d tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2048.458362] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-49d1fde5-1a5b-4a90-8d06-e95cefc0d181 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2048.458621] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-49d1fde5-1a5b-4a90-8d06-e95cefc0d181 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2048.458813] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-49d1fde5-1a5b-4a90-8d06-e95cefc0d181 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Deleting the datastore file [datastore1] 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2048.459090] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7d32be4c-c9fe-4ea1-bd9c-8c6a32d9cff1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.466082] env[63379]: DEBUG oslo_vmware.api [None req-49d1fde5-1a5b-4a90-8d06-e95cefc0d181 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 2048.466082] env[63379]: value = "task-1780503" [ 2048.466082] env[63379]: _type = "Task" [ 2048.466082] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2048.474697] env[63379]: DEBUG oslo_vmware.api [None req-49d1fde5-1a5b-4a90-8d06-e95cefc0d181 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780503, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2048.742318] env[63379]: DEBUG oslo_vmware.api [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780502, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2048.921757] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5a9d9b93-a116-4f30-b7cf-924ffc6e4a66 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2048.976027] env[63379]: DEBUG nova.scheduler.client.report [None req-9e1f4b47-7ff8-4346-be90-8ecd4c21af0d tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Updated inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 with generation 177 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2048.976027] env[63379]: DEBUG nova.compute.provider_tree [None req-9e1f4b47-7ff8-4346-be90-8ecd4c21af0d tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Updating resource provider cf478c89-515f-4372-b90f-4868ab56e978 generation from 177 to 178 during operation: update_inventory {{(pid=63379) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2048.976027] env[63379]: DEBUG nova.compute.provider_tree [None req-9e1f4b47-7ff8-4346-be90-8ecd4c21af0d tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2048.981669] env[63379]: DEBUG oslo_vmware.api [None req-49d1fde5-1a5b-4a90-8d06-e95cefc0d181 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780503, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078227} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2048.982290] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-49d1fde5-1a5b-4a90-8d06-e95cefc0d181 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2048.982682] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-49d1fde5-1a5b-4a90-8d06-e95cefc0d181 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2048.983011] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-49d1fde5-1a5b-4a90-8d06-e95cefc0d181 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2048.983605] env[63379]: INFO nova.compute.manager [None req-49d1fde5-1a5b-4a90-8d06-e95cefc0d181 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Took 2.50 seconds to destroy the instance on the hypervisor. [ 2048.984135] env[63379]: DEBUG oslo.service.loopingcall [None req-49d1fde5-1a5b-4a90-8d06-e95cefc0d181 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2048.984470] env[63379]: DEBUG nova.compute.manager [-] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2048.984673] env[63379]: DEBUG nova.network.neutron [-] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2049.252542] env[63379]: DEBUG oslo_vmware.api [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780502, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2049.486293] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9e1f4b47-7ff8-4346-be90-8ecd4c21af0d tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.489s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2049.487314] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5a9d9b93-a116-4f30-b7cf-924ffc6e4a66 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.566s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2049.488871] env[63379]: DEBUG nova.objects.instance [None req-5a9d9b93-a116-4f30-b7cf-924ffc6e4a66 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lazy-loading 'resources' on Instance uuid 22296faa-10cf-48fe-a777-95d932987cf9 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2049.511048] env[63379]: INFO nova.scheduler.client.report [None req-9e1f4b47-7ff8-4346-be90-8ecd4c21af0d tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Deleted allocations for instance 10fc842d-b821-4103-b6a5-f5b2fc46ea74 [ 2049.743077] env[63379]: DEBUG oslo_vmware.api [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780502, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.14576} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2049.743209] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 7f0c426b-1ce3-469f-8ee1-6dd2178f014e/7f0c426b-1ce3-469f-8ee1-6dd2178f014e.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2049.743837] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2049.743837] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-72447126-5dd0-405c-8c08-c9dc7cb80c40 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.751179] env[63379]: DEBUG oslo_vmware.api [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 2049.751179] env[63379]: value = "task-1780504" [ 2049.751179] env[63379]: _type = "Task" [ 2049.751179] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2049.759970] env[63379]: DEBUG oslo_vmware.api [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780504, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2049.919420] env[63379]: DEBUG nova.network.neutron [-] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2050.022197] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9e1f4b47-7ff8-4346-be90-8ecd4c21af0d tempest-ServersNegativeTestJSON-1202499421 tempest-ServersNegativeTestJSON-1202499421-project-member] Lock "10fc842d-b821-4103-b6a5-f5b2fc46ea74" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.112s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2050.103303] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9acadbb7-6ce6-4a57-ae19-370098cd9376 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.111552] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2f30597-a029-49ee-af39-5d814d74abd8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.144334] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-416d2596-62b6-461a-9ff8-bb1e5327659a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.152520] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23eafa98-0b31-4585-9edd-82734a5dbc5d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.167260] env[63379]: DEBUG nova.compute.provider_tree [None req-5a9d9b93-a116-4f30-b7cf-924ffc6e4a66 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2050.261838] env[63379]: DEBUG oslo_vmware.api [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780504, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067902} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2050.262221] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2050.263034] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b90ca4b4-4ffb-4832-8572-563ceeaa3ed8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.289617] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Reconfiguring VM instance instance-00000076 to attach disk [datastore1] 7f0c426b-1ce3-469f-8ee1-6dd2178f014e/7f0c426b-1ce3-469f-8ee1-6dd2178f014e.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2050.290187] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b0329748-0088-40ec-9191-ad238651e1ed {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.314071] env[63379]: DEBUG oslo_vmware.api [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 2050.314071] env[63379]: value = "task-1780505" [ 2050.314071] env[63379]: _type = "Task" [ 2050.314071] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2050.322956] env[63379]: DEBUG oslo_vmware.api [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780505, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2050.410641] env[63379]: DEBUG nova.compute.manager [req-b4dc9fc8-1031-468d-ad11-214038208630 req-a89c290b-45a8-4b3a-8f8a-a098a99a552c service nova] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Received event network-vif-deleted-05160396-15ed-49fa-b2de-3793f1f45863 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2050.422188] env[63379]: INFO nova.compute.manager [-] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Took 1.44 seconds to deallocate network for instance. 
[ 2050.700695] env[63379]: DEBUG nova.scheduler.client.report [None req-5a9d9b93-a116-4f30-b7cf-924ffc6e4a66 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Updated inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 with generation 178 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2050.701008] env[63379]: DEBUG nova.compute.provider_tree [None req-5a9d9b93-a116-4f30-b7cf-924ffc6e4a66 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Updating resource provider cf478c89-515f-4372-b90f-4868ab56e978 generation from 178 to 179 during operation: update_inventory {{(pid=63379) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2050.701212] env[63379]: DEBUG nova.compute.provider_tree [None req-5a9d9b93-a116-4f30-b7cf-924ffc6e4a66 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2050.825704] env[63379]: DEBUG oslo_vmware.api [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780505, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2050.966864] env[63379]: INFO nova.compute.manager [None req-49d1fde5-1a5b-4a90-8d06-e95cefc0d181 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Took 0.54 seconds to detach 1 volumes for instance. 
[ 2050.969171] env[63379]: DEBUG nova.compute.manager [None req-49d1fde5-1a5b-4a90-8d06-e95cefc0d181 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Deleting volume: eeee8c0e-4fab-40d1-86c6-51050b04b159 {{(pid=63379) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3248}} [ 2051.206961] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5a9d9b93-a116-4f30-b7cf-924ffc6e4a66 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.719s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2051.225743] env[63379]: DEBUG oslo_concurrency.lockutils [None req-83284880-4532-4ee9-a7b1-d050f2dbd4ab tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "0b06665f-befc-4fa3-9eef-2c2f74ba382f" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2051.226439] env[63379]: DEBUG oslo_concurrency.lockutils [None req-83284880-4532-4ee9-a7b1-d050f2dbd4ab tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "0b06665f-befc-4fa3-9eef-2c2f74ba382f" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2051.226439] env[63379]: DEBUG nova.compute.manager [None req-83284880-4532-4ee9-a7b1-d050f2dbd4ab tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2051.227216] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a8298ee-ece5-44ed-b932-6fafd7a004ae {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.233977] env[63379]: INFO nova.scheduler.client.report [None req-5a9d9b93-a116-4f30-b7cf-924ffc6e4a66 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Deleted allocations for instance 22296faa-10cf-48fe-a777-95d932987cf9 [ 2051.237335] env[63379]: DEBUG nova.compute.manager [None req-83284880-4532-4ee9-a7b1-d050f2dbd4ab tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63379) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 2051.237715] env[63379]: DEBUG nova.objects.instance [None req-83284880-4532-4ee9-a7b1-d050f2dbd4ab tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lazy-loading 'flavor' on Instance uuid 0b06665f-befc-4fa3-9eef-2c2f74ba382f {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2051.327401] env[63379]: DEBUG oslo_vmware.api [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 
tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780505, 'name': ReconfigVM_Task, 'duration_secs': 0.762003} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2051.327401] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Reconfigured VM instance instance-00000076 to attach disk [datastore1] 7f0c426b-1ce3-469f-8ee1-6dd2178f014e/7f0c426b-1ce3-469f-8ee1-6dd2178f014e.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2051.327401] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9c9fe985-5d1f-43f0-b9fe-d65c01442d9c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.333839] env[63379]: DEBUG oslo_vmware.api [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 2051.333839] env[63379]: value = "task-1780507" [ 2051.333839] env[63379]: _type = "Task" [ 2051.333839] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2051.344723] env[63379]: DEBUG oslo_vmware.api [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780507, 'name': Rename_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2051.507210] env[63379]: DEBUG oslo_concurrency.lockutils [None req-49d1fde5-1a5b-4a90-8d06-e95cefc0d181 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2051.507495] env[63379]: DEBUG oslo_concurrency.lockutils [None req-49d1fde5-1a5b-4a90-8d06-e95cefc0d181 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2051.507689] env[63379]: DEBUG oslo_concurrency.lockutils [None req-49d1fde5-1a5b-4a90-8d06-e95cefc0d181 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2051.527905] env[63379]: INFO nova.scheduler.client.report [None req-49d1fde5-1a5b-4a90-8d06-e95cefc0d181 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Deleted allocations for instance 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff [ 2051.746925] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-83284880-4532-4ee9-a7b1-d050f2dbd4ab tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2051.747157] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1ce600ad-468c-45ec-b80d-3f73e9b41184 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.749112] env[63379]: DEBUG oslo_concurrency.lockutils [None req-5a9d9b93-a116-4f30-b7cf-924ffc6e4a66 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "22296faa-10cf-48fe-a777-95d932987cf9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.890s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2051.755795] env[63379]: DEBUG oslo_vmware.api [None req-83284880-4532-4ee9-a7b1-d050f2dbd4ab tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 2051.755795] env[63379]: value = "task-1780508" [ 2051.755795] env[63379]: _type = "Task" [ 2051.755795] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2051.764138] env[63379]: DEBUG oslo_vmware.api [None req-83284880-4532-4ee9-a7b1-d050f2dbd4ab tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780508, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2051.844556] env[63379]: DEBUG oslo_vmware.api [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780507, 'name': Rename_Task, 'duration_secs': 0.139221} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2051.844837] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2051.845112] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3f675e90-2b1d-4c12-aee7-741d997eb947 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.852794] env[63379]: DEBUG oslo_vmware.api [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 2051.852794] env[63379]: value = "task-1780509" [ 2051.852794] env[63379]: _type = "Task" [ 2051.852794] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2051.861373] env[63379]: DEBUG oslo_vmware.api [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780509, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2052.037182] env[63379]: DEBUG oslo_concurrency.lockutils [None req-49d1fde5-1a5b-4a90-8d06-e95cefc0d181 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "8b9f070e-11d3-4e2d-a0ce-54bb939a36ff" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.555s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2052.266251] env[63379]: DEBUG oslo_vmware.api [None req-83284880-4532-4ee9-a7b1-d050f2dbd4ab tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780508, 'name': PowerOffVM_Task, 'duration_secs': 0.180996} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2052.266541] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-83284880-4532-4ee9-a7b1-d050f2dbd4ab tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2052.266722] env[63379]: DEBUG nova.compute.manager [None req-83284880-4532-4ee9-a7b1-d050f2dbd4ab tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2052.267561] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73cfe12a-28cd-41c7-a30f-f33f477b91c8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.363795] env[63379]: DEBUG oslo_vmware.api [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780509, 'name': PowerOnVM_Task, 'duration_secs': 0.468382} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2052.364118] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2052.364330] env[63379]: INFO nova.compute.manager [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Took 8.29 seconds to spawn the instance on the hypervisor. [ 2052.364512] env[63379]: DEBUG nova.compute.manager [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2052.365362] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89844fc6-ba7f-4047-9eea-e334899f589f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.780535] env[63379]: DEBUG oslo_concurrency.lockutils [None req-83284880-4532-4ee9-a7b1-d050f2dbd4ab tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "0b06665f-befc-4fa3-9eef-2c2f74ba382f" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.554s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2052.882461] env[63379]: INFO nova.compute.manager [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Took 13.03 seconds to build instance. 
[ 2052.953858] env[63379]: DEBUG oslo_concurrency.lockutils [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "18498244-3385-47dd-8810-b0cc731c3966" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2052.953858] env[63379]: DEBUG oslo_concurrency.lockutils [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "18498244-3385-47dd-8810-b0cc731c3966" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2053.217582] env[63379]: DEBUG oslo_concurrency.lockutils [None req-03995ced-4302-4547-b705-5630a4c22f38 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "e1681d89-2f55-47b7-9962-55aa169b3d0a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2053.217866] env[63379]: DEBUG oslo_concurrency.lockutils [None req-03995ced-4302-4547-b705-5630a4c22f38 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "e1681d89-2f55-47b7-9962-55aa169b3d0a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2053.218127] env[63379]: DEBUG oslo_concurrency.lockutils [None req-03995ced-4302-4547-b705-5630a4c22f38 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "e1681d89-2f55-47b7-9962-55aa169b3d0a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2053.218309] env[63379]: DEBUG oslo_concurrency.lockutils [None req-03995ced-4302-4547-b705-5630a4c22f38 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "e1681d89-2f55-47b7-9962-55aa169b3d0a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2053.218537] env[63379]: DEBUG oslo_concurrency.lockutils [None req-03995ced-4302-4547-b705-5630a4c22f38 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "e1681d89-2f55-47b7-9962-55aa169b3d0a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2053.221766] env[63379]: INFO nova.compute.manager [None req-03995ced-4302-4547-b705-5630a4c22f38 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Terminating instance [ 2053.224548] env[63379]: DEBUG 
nova.compute.manager [None req-03995ced-4302-4547-b705-5630a4c22f38 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2053.224859] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-03995ced-4302-4547-b705-5630a4c22f38 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2053.226080] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78b2475b-0bf0-4689-a862-b3689b82cb5d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.235554] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-03995ced-4302-4547-b705-5630a4c22f38 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2053.236030] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-26cd99b8-bc79-4776-8638-30b39d97178e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.243311] env[63379]: DEBUG oslo_vmware.api [None req-03995ced-4302-4547-b705-5630a4c22f38 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 2053.243311] env[63379]: value = "task-1780510" [ 2053.243311] env[63379]: _type = "Task" [ 2053.243311] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2053.257382] env[63379]: DEBUG oslo_vmware.api [None req-03995ced-4302-4547-b705-5630a4c22f38 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780510, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2053.385807] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b417f6dc-a2b6-4edb-b2e4-9f500275b2e2 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "7f0c426b-1ce3-469f-8ee1-6dd2178f014e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.542s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2053.389102] env[63379]: DEBUG nova.compute.manager [req-ed64a68a-60bb-484d-b30b-b66e12b85393 req-5627a934-9033-4684-a915-3d03b1e0a922 service nova] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Received event network-changed-f4822da6-3551-4e0b-937f-55536f9c7342 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2053.389311] env[63379]: DEBUG nova.compute.manager [req-ed64a68a-60bb-484d-b30b-b66e12b85393 req-5627a934-9033-4684-a915-3d03b1e0a922 service nova] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Refreshing instance network info cache due to event network-changed-f4822da6-3551-4e0b-937f-55536f9c7342. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 2053.389563] env[63379]: DEBUG oslo_concurrency.lockutils [req-ed64a68a-60bb-484d-b30b-b66e12b85393 req-5627a934-9033-4684-a915-3d03b1e0a922 service nova] Acquiring lock "refresh_cache-7f0c426b-1ce3-469f-8ee1-6dd2178f014e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2053.389709] env[63379]: DEBUG oslo_concurrency.lockutils [req-ed64a68a-60bb-484d-b30b-b66e12b85393 req-5627a934-9033-4684-a915-3d03b1e0a922 service nova] Acquired lock "refresh_cache-7f0c426b-1ce3-469f-8ee1-6dd2178f014e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2053.389844] env[63379]: DEBUG nova.network.neutron [req-ed64a68a-60bb-484d-b30b-b66e12b85393 req-5627a934-9033-4684-a915-3d03b1e0a922 service nova] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Refreshing network info cache for port f4822da6-3551-4e0b-937f-55536f9c7342 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2053.456831] env[63379]: DEBUG nova.compute.manager [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2053.643465] env[63379]: DEBUG nova.objects.instance [None req-24027fed-fb3a-4135-bdff-1088cb93cd84 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lazy-loading 'flavor' on Instance uuid 0b06665f-befc-4fa3-9eef-2c2f74ba382f {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2053.754450] env[63379]: DEBUG oslo_vmware.api [None req-03995ced-4302-4547-b705-5630a4c22f38 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780510, 'name': PowerOffVM_Task, 'duration_secs': 0.217479} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2053.754728] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-03995ced-4302-4547-b705-5630a4c22f38 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2053.754911] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-03995ced-4302-4547-b705-5630a4c22f38 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2053.755198] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c4c58213-57df-4f78-ba4c-4ab2628d2c34 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.897273] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-03995ced-4302-4547-b705-5630a4c22f38 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2053.897505] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-03995ced-4302-4547-b705-5630a4c22f38 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2053.897701] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-03995ced-4302-4547-b705-5630a4c22f38 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Deleting the datastore file [datastore1] e1681d89-2f55-47b7-9962-55aa169b3d0a {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2053.898385] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e2774a36-371f-4929-b20e-512e1f872ee7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.905708] env[63379]: DEBUG oslo_vmware.api [None req-03995ced-4302-4547-b705-5630a4c22f38 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 2053.905708] env[63379]: value = "task-1780512" [ 2053.905708] env[63379]: _type = "Task" [ 2053.905708] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2053.915087] env[63379]: DEBUG oslo_vmware.api [None req-03995ced-4302-4547-b705-5630a4c22f38 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780512, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2053.981284] env[63379]: DEBUG oslo_concurrency.lockutils [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2053.981284] env[63379]: DEBUG oslo_concurrency.lockutils [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2053.982158] env[63379]: INFO nova.compute.claims [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2054.136173] env[63379]: DEBUG nova.network.neutron [req-ed64a68a-60bb-484d-b30b-b66e12b85393 req-5627a934-9033-4684-a915-3d03b1e0a922 service nova] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Updated VIF entry in instance network info cache for port f4822da6-3551-4e0b-937f-55536f9c7342. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2054.137025] env[63379]: DEBUG nova.network.neutron [req-ed64a68a-60bb-484d-b30b-b66e12b85393 req-5627a934-9033-4684-a915-3d03b1e0a922 service nova] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Updating instance_info_cache with network_info: [{"id": "f4822da6-3551-4e0b-937f-55536f9c7342", "address": "fa:16:3e:69:f3:f8", "network": {"id": "0dd98be0-5b25-4e45-ac38-4b8d3cd9fc6c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-191573180-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "645f0e0a5e1a44d59ca9c85da49bb454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4822da6-35", "ovs_interfaceid": "f4822da6-3551-4e0b-937f-55536f9c7342", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2054.148873] env[63379]: DEBUG oslo_concurrency.lockutils [None req-24027fed-fb3a-4135-bdff-1088cb93cd84 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "refresh_cache-0b06665f-befc-4fa3-9eef-2c2f74ba382f" {{(pid=63379) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2054.149034] env[63379]: DEBUG oslo_concurrency.lockutils [None req-24027fed-fb3a-4135-bdff-1088cb93cd84 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquired lock "refresh_cache-0b06665f-befc-4fa3-9eef-2c2f74ba382f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2054.149191] env[63379]: DEBUG nova.network.neutron [None req-24027fed-fb3a-4135-bdff-1088cb93cd84 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2054.149374] env[63379]: DEBUG nova.objects.instance [None req-24027fed-fb3a-4135-bdff-1088cb93cd84 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lazy-loading 'info_cache' on Instance uuid 0b06665f-befc-4fa3-9eef-2c2f74ba382f {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2054.415788] env[63379]: DEBUG oslo_vmware.api [None req-03995ced-4302-4547-b705-5630a4c22f38 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780512, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.154767} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2054.416155] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-03995ced-4302-4547-b705-5630a4c22f38 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2054.416199] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-03995ced-4302-4547-b705-5630a4c22f38 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2054.416377] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-03995ced-4302-4547-b705-5630a4c22f38 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2054.416573] env[63379]: INFO nova.compute.manager [None req-03995ced-4302-4547-b705-5630a4c22f38 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Took 1.19 seconds to destroy the instance on the hypervisor. [ 2054.416820] env[63379]: DEBUG oslo.service.loopingcall [None req-03995ced-4302-4547-b705-5630a4c22f38 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2054.417033] env[63379]: DEBUG nova.compute.manager [-] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2054.417134] env[63379]: DEBUG nova.network.neutron [-] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2054.639705] env[63379]: DEBUG oslo_concurrency.lockutils [req-ed64a68a-60bb-484d-b30b-b66e12b85393 req-5627a934-9033-4684-a915-3d03b1e0a922 service nova] Releasing lock "refresh_cache-7f0c426b-1ce3-469f-8ee1-6dd2178f014e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2054.652920] env[63379]: DEBUG nova.objects.base [None req-24027fed-fb3a-4135-bdff-1088cb93cd84 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Object Instance<0b06665f-befc-4fa3-9eef-2c2f74ba382f> lazy-loaded attributes: flavor,info_cache {{(pid=63379) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2055.084680] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30e03cdf-9d92-48c8-a3ec-64def62cf901 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.094574] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee2571d6-7482-42b2-974d-b554ee30dd62 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.127249] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13aa00c2-486d-44f2-9868-aa8384841fdb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.130060] env[63379]: DEBUG nova.network.neutron [-] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2055.137325] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a071e46-6ffa-4520-9d28-3d85bf70b1e4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.152104] env[63379]: DEBUG nova.compute.provider_tree [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2055.409853] env[63379]: DEBUG nova.network.neutron [None req-24027fed-fb3a-4135-bdff-1088cb93cd84 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Updating instance_info_cache with network_info: [{"id": "61782886-48c0-44e0-a33b-122b4323cfe0", "address": "fa:16:3e:f1:66:51", "network": {"id": "c67e6fb1-ba3e-4494-b459-ecd555f3bf64", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1864563188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", 
"type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.212", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c01c5c8c3734c4ea066324e542e7374", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6934071-bf85-4591-9c7d-55c7ea131262", "external-id": "nsx-vlan-transportzone-452", "segmentation_id": 452, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61782886-48", "ovs_interfaceid": "61782886-48c0-44e0-a33b-122b4323cfe0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2055.421149] env[63379]: DEBUG nova.compute.manager [req-29616d25-1172-4de1-ad4c-01a1a19e95a8 req-dde650e6-9c90-48a5-8168-1116e3fb6c85 service nova] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Received event network-vif-deleted-a482c861-81a4-437a-a78a-27d652a2e57d {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2055.633202] env[63379]: INFO nova.compute.manager [-] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Took 1.22 seconds to deallocate network for instance. [ 2055.654985] env[63379]: DEBUG nova.scheduler.client.report [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2055.913542] env[63379]: DEBUG oslo_concurrency.lockutils [None req-24027fed-fb3a-4135-bdff-1088cb93cd84 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Releasing lock "refresh_cache-0b06665f-befc-4fa3-9eef-2c2f74ba382f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2056.139774] env[63379]: DEBUG oslo_concurrency.lockutils [None req-03995ced-4302-4547-b705-5630a4c22f38 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2056.160298] env[63379]: DEBUG oslo_concurrency.lockutils [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.180s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2056.162084] env[63379]: DEBUG 
nova.compute.manager [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2056.163798] env[63379]: DEBUG oslo_concurrency.lockutils [None req-03995ced-4302-4547-b705-5630a4c22f38 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.024s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2056.163996] env[63379]: DEBUG nova.objects.instance [None req-03995ced-4302-4547-b705-5630a4c22f38 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lazy-loading 'resources' on Instance uuid e1681d89-2f55-47b7-9962-55aa169b3d0a {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2056.417858] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-24027fed-fb3a-4135-bdff-1088cb93cd84 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2056.418165] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-19dc0b6f-c32c-41ff-8be0-eb13679043ea {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.426076] env[63379]: DEBUG oslo_vmware.api [None req-24027fed-fb3a-4135-bdff-1088cb93cd84 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 2056.426076] env[63379]: value = "task-1780513" [ 2056.426076] env[63379]: _type = "Task" [ 2056.426076] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2056.434524] env[63379]: DEBUG oslo_vmware.api [None req-24027fed-fb3a-4135-bdff-1088cb93cd84 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780513, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2056.667483] env[63379]: DEBUG nova.compute.utils [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2056.669014] env[63379]: DEBUG nova.compute.manager [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2056.669174] env[63379]: DEBUG nova.network.neutron [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 18498244-3385-47dd-8810-b0cc731c3966] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2056.711417] env[63379]: DEBUG nova.policy [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f2e7c2125f0044508dc4016c4de224e2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9746ae945355479fa5880802e08d2b0a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 2056.746257] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14d47f22-3805-4cfd-85f0-630c6891cf2d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.754656] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3dd1cd1-0c40-4176-9c5a-507c82a6accd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.785152] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76fdbbb5-23ec-473c-b73f-157b5ab642a0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.792643] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15d57636-1233-4180-a240-56718b6e4591 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.805813] env[63379]: DEBUG nova.compute.provider_tree [None req-03995ced-4302-4547-b705-5630a4c22f38 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2056.938910] env[63379]: DEBUG oslo_vmware.api [None req-24027fed-fb3a-4135-bdff-1088cb93cd84 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780513, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2056.948431] env[63379]: DEBUG nova.network.neutron [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Successfully created port: 6700d3b0-666a-4a4c-9ec7-7e59e38370b3 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2057.176057] env[63379]: DEBUG nova.compute.manager [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2057.309518] env[63379]: DEBUG nova.scheduler.client.report [None req-03995ced-4302-4547-b705-5630a4c22f38 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2057.437466] env[63379]: DEBUG oslo_vmware.api [None req-24027fed-fb3a-4135-bdff-1088cb93cd84 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780513, 'name': PowerOnVM_Task, 'duration_secs': 0.805121} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2057.437793] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-24027fed-fb3a-4135-bdff-1088cb93cd84 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2057.437930] env[63379]: DEBUG nova.compute.manager [None req-24027fed-fb3a-4135-bdff-1088cb93cd84 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2057.438757] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7e707e0-093f-4c12-99de-b7d3acb28d42 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.814226] env[63379]: DEBUG oslo_concurrency.lockutils [None req-03995ced-4302-4547-b705-5630a4c22f38 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.650s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2057.836820] env[63379]: INFO nova.scheduler.client.report [None req-03995ced-4302-4547-b705-5630a4c22f38 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Deleted allocations for instance e1681d89-2f55-47b7-9962-55aa169b3d0a [ 2058.184121] env[63379]: DEBUG nova.compute.manager [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2058.212324] env[63379]: DEBUG nova.virt.hardware [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2058.212713] env[63379]: DEBUG nova.virt.hardware [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2058.212842] env[63379]: DEBUG nova.virt.hardware [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2058.213144] env[63379]: DEBUG nova.virt.hardware [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2058.213368] env[63379]: DEBUG nova.virt.hardware [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2058.213571] env[63379]: DEBUG nova.virt.hardware [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2058.213857] env[63379]: DEBUG nova.virt.hardware [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2058.214100] env[63379]: DEBUG nova.virt.hardware [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2058.214299] env[63379]: DEBUG nova.virt.hardware [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 
tempest-ServersTestJSON-1933653091-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2058.214472] env[63379]: DEBUG nova.virt.hardware [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2058.214759] env[63379]: DEBUG nova.virt.hardware [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2058.216184] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b341701e-11b0-4045-841c-d2dedf1e919e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.225588] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d953418-6cc5-45c2-9d80-5f5a29b330ab {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.345177] env[63379]: DEBUG oslo_concurrency.lockutils [None req-03995ced-4302-4547-b705-5630a4c22f38 tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "e1681d89-2f55-47b7-9962-55aa169b3d0a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.127s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2058.360113] env[63379]: DEBUG nova.compute.manager [req-e179e378-0a14-41d2-89d0-36e011ac6493 req-f0aa5999-6d80-4954-bada-44e25491f61d service nova] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Received event network-vif-plugged-6700d3b0-666a-4a4c-9ec7-7e59e38370b3 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2058.360347] env[63379]: DEBUG oslo_concurrency.lockutils [req-e179e378-0a14-41d2-89d0-36e011ac6493 req-f0aa5999-6d80-4954-bada-44e25491f61d service nova] Acquiring lock "18498244-3385-47dd-8810-b0cc731c3966-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2058.360558] env[63379]: DEBUG oslo_concurrency.lockutils [req-e179e378-0a14-41d2-89d0-36e011ac6493 req-f0aa5999-6d80-4954-bada-44e25491f61d service nova] Lock "18498244-3385-47dd-8810-b0cc731c3966-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2058.360849] env[63379]: DEBUG oslo_concurrency.lockutils [req-e179e378-0a14-41d2-89d0-36e011ac6493 req-f0aa5999-6d80-4954-bada-44e25491f61d service nova] Lock "18498244-3385-47dd-8810-b0cc731c3966-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2058.360918] env[63379]: DEBUG nova.compute.manager [req-e179e378-0a14-41d2-89d0-36e011ac6493 
req-f0aa5999-6d80-4954-bada-44e25491f61d service nova] [instance: 18498244-3385-47dd-8810-b0cc731c3966] No waiting events found dispatching network-vif-plugged-6700d3b0-666a-4a4c-9ec7-7e59e38370b3 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2058.361056] env[63379]: WARNING nova.compute.manager [req-e179e378-0a14-41d2-89d0-36e011ac6493 req-f0aa5999-6d80-4954-bada-44e25491f61d service nova] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Received unexpected event network-vif-plugged-6700d3b0-666a-4a4c-9ec7-7e59e38370b3 for instance with vm_state building and task_state spawning. [ 2058.447772] env[63379]: DEBUG nova.network.neutron [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Successfully updated port: 6700d3b0-666a-4a4c-9ec7-7e59e38370b3 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2058.693710] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f223e99-75c1-4c03-a8f0-6a55c35589d5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.701230] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-860b3220-a14f-46cb-9ae5-2ac4d629ddfb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Suspending the VM {{(pid=63379) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 2058.701482] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-ae07c0f6-dbd8-4ae2-b386-8cc6f4099e89 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.708497] env[63379]: DEBUG oslo_vmware.api [None req-860b3220-a14f-46cb-9ae5-2ac4d629ddfb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 2058.708497] env[63379]: value = "task-1780515" [ 2058.708497] env[63379]: _type = "Task" [ 2058.708497] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2058.716125] env[63379]: DEBUG oslo_vmware.api [None req-860b3220-a14f-46cb-9ae5-2ac4d629ddfb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780515, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2058.950242] env[63379]: DEBUG oslo_concurrency.lockutils [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "refresh_cache-18498244-3385-47dd-8810-b0cc731c3966" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2058.950464] env[63379]: DEBUG oslo_concurrency.lockutils [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquired lock "refresh_cache-18498244-3385-47dd-8810-b0cc731c3966" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2058.950581] env[63379]: DEBUG nova.network.neutron [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2059.220353] env[63379]: DEBUG oslo_vmware.api [None req-860b3220-a14f-46cb-9ae5-2ac4d629ddfb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780515, 'name': SuspendVM_Task} progress is 70%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2059.481102] env[63379]: DEBUG nova.network.neutron [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Instance cache missing network info. 
{{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2059.602419] env[63379]: DEBUG nova.network.neutron [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Updating instance_info_cache with network_info: [{"id": "6700d3b0-666a-4a4c-9ec7-7e59e38370b3", "address": "fa:16:3e:3d:64:b4", "network": {"id": "13b14fc1-6384-47ab-b623-f48d1ef0c41e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1646386679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9746ae945355479fa5880802e08d2b0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16c6ea68-9b0e-4ac0-a484-7a9a40533017", "external-id": "nsx-vlan-transportzone-384", "segmentation_id": 384, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6700d3b0-66", "ovs_interfaceid": "6700d3b0-666a-4a4c-9ec7-7e59e38370b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2059.719538] env[63379]: DEBUG oslo_vmware.api [None req-860b3220-a14f-46cb-9ae5-2ac4d629ddfb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780515, 'name': SuspendVM_Task} progress is 70%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2060.104814] env[63379]: DEBUG oslo_concurrency.lockutils [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Releasing lock "refresh_cache-18498244-3385-47dd-8810-b0cc731c3966" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2060.105170] env[63379]: DEBUG nova.compute.manager [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Instance network_info: |[{"id": "6700d3b0-666a-4a4c-9ec7-7e59e38370b3", "address": "fa:16:3e:3d:64:b4", "network": {"id": "13b14fc1-6384-47ab-b623-f48d1ef0c41e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1646386679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9746ae945355479fa5880802e08d2b0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16c6ea68-9b0e-4ac0-a484-7a9a40533017", "external-id": "nsx-vlan-transportzone-384", "segmentation_id": 384, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6700d3b0-66", "ovs_interfaceid": "6700d3b0-666a-4a4c-9ec7-7e59e38370b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2060.105627] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3d:64:b4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '16c6ea68-9b0e-4ac0-a484-7a9a40533017', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6700d3b0-666a-4a4c-9ec7-7e59e38370b3', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2060.113355] env[63379]: DEBUG oslo.service.loopingcall [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2060.113535] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2060.113762] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f4fe8df6-8494-41c7-832f-ef08a24c92e6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.134906] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2060.134906] env[63379]: value = "task-1780516" [ 2060.134906] env[63379]: _type = "Task" [ 2060.134906] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2060.143382] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780516, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2060.219785] env[63379]: DEBUG oslo_vmware.api [None req-860b3220-a14f-46cb-9ae5-2ac4d629ddfb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780515, 'name': SuspendVM_Task, 'duration_secs': 1.194173} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2060.220073] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-860b3220-a14f-46cb-9ae5-2ac4d629ddfb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Suspended the VM {{(pid=63379) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 2060.220270] env[63379]: DEBUG nova.compute.manager [None req-860b3220-a14f-46cb-9ae5-2ac4d629ddfb tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2060.221085] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-693c53cc-bfe4-4c6f-bc52-7a42542f1b5a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.386341] env[63379]: DEBUG nova.compute.manager [req-f3fa237c-a341-4e54-9292-5886193fdeb7 req-662e4688-b3d5-4f57-b6b5-3b2b49a51f69 service nova] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Received event network-changed-6700d3b0-666a-4a4c-9ec7-7e59e38370b3 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2060.386408] env[63379]: DEBUG nova.compute.manager [req-f3fa237c-a341-4e54-9292-5886193fdeb7 req-662e4688-b3d5-4f57-b6b5-3b2b49a51f69 service nova] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Refreshing instance network info cache due to event network-changed-6700d3b0-666a-4a4c-9ec7-7e59e38370b3. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 2060.386636] env[63379]: DEBUG oslo_concurrency.lockutils [req-f3fa237c-a341-4e54-9292-5886193fdeb7 req-662e4688-b3d5-4f57-b6b5-3b2b49a51f69 service nova] Acquiring lock "refresh_cache-18498244-3385-47dd-8810-b0cc731c3966" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2060.386800] env[63379]: DEBUG oslo_concurrency.lockutils [req-f3fa237c-a341-4e54-9292-5886193fdeb7 req-662e4688-b3d5-4f57-b6b5-3b2b49a51f69 service nova] Acquired lock "refresh_cache-18498244-3385-47dd-8810-b0cc731c3966" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2060.386982] env[63379]: DEBUG nova.network.neutron [req-f3fa237c-a341-4e54-9292-5886193fdeb7 req-662e4688-b3d5-4f57-b6b5-3b2b49a51f69 service nova] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Refreshing network info cache for port 6700d3b0-666a-4a4c-9ec7-7e59e38370b3 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2060.647167] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780516, 'name': CreateVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.043145] env[63379]: INFO nova.compute.manager [None req-87a5bbd9-6c06-43b4-a53e-5d14e5155198 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Resuming [ 2061.043768] env[63379]: DEBUG nova.objects.instance [None req-87a5bbd9-6c06-43b4-a53e-5d14e5155198 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lazy-loading 'flavor' on Instance uuid 0b06665f-befc-4fa3-9eef-2c2f74ba382f {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2061.146820] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780516, 'name': CreateVM_Task} progress is 99%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.147663] env[63379]: DEBUG nova.network.neutron [req-f3fa237c-a341-4e54-9292-5886193fdeb7 req-662e4688-b3d5-4f57-b6b5-3b2b49a51f69 service nova] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Updated VIF entry in instance network info cache for port 6700d3b0-666a-4a4c-9ec7-7e59e38370b3. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2061.148010] env[63379]: DEBUG nova.network.neutron [req-f3fa237c-a341-4e54-9292-5886193fdeb7 req-662e4688-b3d5-4f57-b6b5-3b2b49a51f69 service nova] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Updating instance_info_cache with network_info: [{"id": "6700d3b0-666a-4a4c-9ec7-7e59e38370b3", "address": "fa:16:3e:3d:64:b4", "network": {"id": "13b14fc1-6384-47ab-b623-f48d1ef0c41e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1646386679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9746ae945355479fa5880802e08d2b0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16c6ea68-9b0e-4ac0-a484-7a9a40533017", "external-id": "nsx-vlan-transportzone-384", "segmentation_id": 384, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6700d3b0-66", "ovs_interfaceid": "6700d3b0-666a-4a4c-9ec7-7e59e38370b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2061.647625] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780516, 'name': CreateVM_Task, 'duration_secs': 1.37675} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2061.648027] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2061.648519] env[63379]: DEBUG oslo_concurrency.lockutils [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2061.648716] env[63379]: DEBUG oslo_concurrency.lockutils [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2061.649092] env[63379]: DEBUG oslo_concurrency.lockutils [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2061.649363] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4eebc929-66cd-4ebb-a9a0-f7154737da77 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.651246] 
env[63379]: DEBUG oslo_concurrency.lockutils [req-f3fa237c-a341-4e54-9292-5886193fdeb7 req-662e4688-b3d5-4f57-b6b5-3b2b49a51f69 service nova] Releasing lock "refresh_cache-18498244-3385-47dd-8810-b0cc731c3966" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2061.654882] env[63379]: DEBUG oslo_vmware.api [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2061.654882] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]527dce27-31bb-f8c5-3dcd-f7a83cc37a43" [ 2061.654882] env[63379]: _type = "Task" [ 2061.654882] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2061.663567] env[63379]: DEBUG oslo_vmware.api [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]527dce27-31bb-f8c5-3dcd-f7a83cc37a43, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.913634] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fb704f5f-e6f8-4738-881c-698e428423df tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "510db409-0b4c-494a-8084-39ef3cd6c918" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2061.913634] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fb704f5f-e6f8-4738-881c-698e428423df tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "510db409-0b4c-494a-8084-39ef3cd6c918" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2061.913847] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fb704f5f-e6f8-4738-881c-698e428423df tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "510db409-0b4c-494a-8084-39ef3cd6c918-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2061.913951] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fb704f5f-e6f8-4738-881c-698e428423df tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "510db409-0b4c-494a-8084-39ef3cd6c918-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2061.914138] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fb704f5f-e6f8-4738-881c-698e428423df tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "510db409-0b4c-494a-8084-39ef3cd6c918-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2061.916234] env[63379]: INFO nova.compute.manager [None req-fb704f5f-e6f8-4738-881c-698e428423df tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Terminating instance [ 2061.918410] env[63379]: DEBUG nova.compute.manager [None req-fb704f5f-e6f8-4738-881c-698e428423df tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2061.918604] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-fb704f5f-e6f8-4738-881c-698e428423df tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2061.919462] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53760cfa-7b6e-4fe8-a8c8-3ab10321b999 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.927176] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb704f5f-e6f8-4738-881c-698e428423df tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2061.927402] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-49ab8e60-57ba-405a-9cd7-ef12378cdc89 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.934425] env[63379]: DEBUG oslo_vmware.api [None req-fb704f5f-e6f8-4738-881c-698e428423df tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 2061.934425] env[63379]: value = "task-1780517" [ 2061.934425] env[63379]: _type = "Task" [ 2061.934425] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2061.942309] env[63379]: DEBUG oslo_vmware.api [None req-fb704f5f-e6f8-4738-881c-698e428423df tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780517, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2062.053654] env[63379]: DEBUG oslo_concurrency.lockutils [None req-87a5bbd9-6c06-43b4-a53e-5d14e5155198 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "refresh_cache-0b06665f-befc-4fa3-9eef-2c2f74ba382f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2062.053891] env[63379]: DEBUG oslo_concurrency.lockutils [None req-87a5bbd9-6c06-43b4-a53e-5d14e5155198 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquired lock "refresh_cache-0b06665f-befc-4fa3-9eef-2c2f74ba382f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2062.054045] env[63379]: DEBUG nova.network.neutron [None req-87a5bbd9-6c06-43b4-a53e-5d14e5155198 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2062.167133] env[63379]: DEBUG oslo_vmware.api [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]527dce27-31bb-f8c5-3dcd-f7a83cc37a43, 'name': SearchDatastore_Task, 'duration_secs': 0.011898} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2062.167522] env[63379]: DEBUG oslo_concurrency.lockutils [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2062.167811] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2062.168104] env[63379]: DEBUG oslo_concurrency.lockutils [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2062.168296] env[63379]: DEBUG oslo_concurrency.lockutils [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2062.168518] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Creating directory with path 
[datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2062.168846] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b564da18-622a-4704-b6cf-749b227d0e5f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.178010] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2062.178224] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2062.178974] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04e5cbae-1b80-408d-abea-624beccb8c60 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.184817] env[63379]: DEBUG oslo_vmware.api [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2062.184817] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b4cc1e-9edf-d2aa-9daf-4a2bf3ef0180" [ 2062.184817] env[63379]: _type = "Task" [ 2062.184817] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2062.193310] env[63379]: DEBUG oslo_vmware.api [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b4cc1e-9edf-d2aa-9daf-4a2bf3ef0180, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2062.445647] env[63379]: DEBUG oslo_vmware.api [None req-fb704f5f-e6f8-4738-881c-698e428423df tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780517, 'name': PowerOffVM_Task, 'duration_secs': 0.214653} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2062.445892] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb704f5f-e6f8-4738-881c-698e428423df tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2062.446080] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-fb704f5f-e6f8-4738-881c-698e428423df tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2062.446341] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5a9e8bad-d4d6-4499-96ff-7e3a5d1e7931 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.547020] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-fb704f5f-e6f8-4738-881c-698e428423df tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2062.547286] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-fb704f5f-e6f8-4738-881c-698e428423df tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2062.547454] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb704f5f-e6f8-4738-881c-698e428423df tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Deleting the datastore file [datastore1] 510db409-0b4c-494a-8084-39ef3cd6c918 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2062.547730] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-48fee900-e0d7-445e-a170-900134d0a56c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.555358] env[63379]: DEBUG oslo_vmware.api [None req-fb704f5f-e6f8-4738-881c-698e428423df tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for the task: (returnval){ [ 2062.555358] env[63379]: value = "task-1780519" [ 2062.555358] env[63379]: _type = "Task" [ 2062.555358] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2062.566953] env[63379]: DEBUG oslo_vmware.api [None req-fb704f5f-e6f8-4738-881c-698e428423df tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780519, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2062.695727] env[63379]: DEBUG oslo_vmware.api [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b4cc1e-9edf-d2aa-9daf-4a2bf3ef0180, 'name': SearchDatastore_Task, 'duration_secs': 0.009576} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2062.696508] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8a8dcbc-003f-4cc8-b330-b164c416c6c7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.701899] env[63379]: DEBUG oslo_vmware.api [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2062.701899] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]524a1faa-e468-d0d3-b2f7-623b16f8973c" [ 2062.701899] env[63379]: _type = "Task" [ 2062.701899] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2062.709936] env[63379]: DEBUG oslo_vmware.api [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]524a1faa-e468-d0d3-b2f7-623b16f8973c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2062.774697] env[63379]: DEBUG nova.network.neutron [None req-87a5bbd9-6c06-43b4-a53e-5d14e5155198 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Updating instance_info_cache with network_info: [{"id": "61782886-48c0-44e0-a33b-122b4323cfe0", "address": "fa:16:3e:f1:66:51", "network": {"id": "c67e6fb1-ba3e-4494-b459-ecd555f3bf64", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1864563188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.212", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c01c5c8c3734c4ea066324e542e7374", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6934071-bf85-4591-9c7d-55c7ea131262", "external-id": "nsx-vlan-transportzone-452", "segmentation_id": 452, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61782886-48", "ovs_interfaceid": "61782886-48c0-44e0-a33b-122b4323cfe0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2063.066414] env[63379]: DEBUG oslo_vmware.api [None req-fb704f5f-e6f8-4738-881c-698e428423df 
tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Task: {'id': task-1780519, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.131172} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2063.066679] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb704f5f-e6f8-4738-881c-698e428423df tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2063.066886] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-fb704f5f-e6f8-4738-881c-698e428423df tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2063.067107] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-fb704f5f-e6f8-4738-881c-698e428423df tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2063.067297] env[63379]: INFO nova.compute.manager [None req-fb704f5f-e6f8-4738-881c-698e428423df tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Took 1.15 seconds to destroy the instance on the hypervisor. [ 2063.067548] env[63379]: DEBUG oslo.service.loopingcall [None req-fb704f5f-e6f8-4738-881c-698e428423df tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2063.067748] env[63379]: DEBUG nova.compute.manager [-] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2063.067846] env[63379]: DEBUG nova.network.neutron [-] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2063.214040] env[63379]: DEBUG oslo_vmware.api [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]524a1faa-e468-d0d3-b2f7-623b16f8973c, 'name': SearchDatastore_Task, 'duration_secs': 0.009393} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2063.214360] env[63379]: DEBUG oslo_concurrency.lockutils [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2063.214643] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 18498244-3385-47dd-8810-b0cc731c3966/18498244-3385-47dd-8810-b0cc731c3966.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2063.214928] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2d897a55-1ca6-4f98-a033-c36679aa7793 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.222259] env[63379]: DEBUG oslo_vmware.api [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2063.222259] env[63379]: value = "task-1780520" [ 2063.222259] env[63379]: _type = "Task" [ 2063.222259] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2063.230587] env[63379]: DEBUG oslo_vmware.api [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780520, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2063.277619] env[63379]: DEBUG oslo_concurrency.lockutils [None req-87a5bbd9-6c06-43b4-a53e-5d14e5155198 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Releasing lock "refresh_cache-0b06665f-befc-4fa3-9eef-2c2f74ba382f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2063.278603] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27da289f-7a0a-4d20-ac3f-248cdd4737ac {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.286732] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-87a5bbd9-6c06-43b4-a53e-5d14e5155198 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Resuming the VM {{(pid=63379) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 2063.286997] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-924024db-8b66-4a84-9f75-914fd7d74dc3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.293600] env[63379]: DEBUG nova.compute.manager [req-648c2765-9026-49dd-bc86-8ad16ccfb4d8 req-b546d055-b1d0-4e27-bb62-43f60323cf8f service nova] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Received event network-vif-deleted-6cdabd2b-f665-46a9-a86e-2527cfe452bf {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2063.293816] env[63379]: INFO nova.compute.manager [req-648c2765-9026-49dd-bc86-8ad16ccfb4d8 req-b546d055-b1d0-4e27-bb62-43f60323cf8f service nova] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Neutron deleted interface 6cdabd2b-f665-46a9-a86e-2527cfe452bf; detaching it from the instance and deleting it from the info cache [ 2063.294027] env[63379]: DEBUG nova.network.neutron [req-648c2765-9026-49dd-bc86-8ad16ccfb4d8 req-b546d055-b1d0-4e27-bb62-43f60323cf8f service nova] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2063.296433] env[63379]: DEBUG oslo_vmware.api [None req-87a5bbd9-6c06-43b4-a53e-5d14e5155198 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 2063.296433] env[63379]: value = "task-1780521" [ 2063.296433] env[63379]: _type = "Task" [ 2063.296433] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2063.305634] env[63379]: DEBUG oslo_vmware.api [None req-87a5bbd9-6c06-43b4-a53e-5d14e5155198 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780521, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2063.733863] env[63379]: DEBUG oslo_vmware.api [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780520, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.472644} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2063.734298] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 18498244-3385-47dd-8810-b0cc731c3966/18498244-3385-47dd-8810-b0cc731c3966.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2063.734532] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2063.734825] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a3ca4f03-f6b6-453c-a8b5-83202be1c5e5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.743717] env[63379]: DEBUG oslo_vmware.api [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2063.743717] env[63379]: value = "task-1780522" [ 2063.743717] env[63379]: _type = "Task" [ 2063.743717] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2063.753723] env[63379]: DEBUG oslo_vmware.api [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780522, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2063.770326] env[63379]: DEBUG nova.network.neutron [-] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2063.798383] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3f229eab-549d-40d0-b4b4-17c4dd33350c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.815490] env[63379]: DEBUG oslo_vmware.api [None req-87a5bbd9-6c06-43b4-a53e-5d14e5155198 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780521, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2063.821633] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24424c17-2f1d-4b2c-90f1-ad38f2ac55c5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.853446] env[63379]: DEBUG nova.compute.manager [req-648c2765-9026-49dd-bc86-8ad16ccfb4d8 req-b546d055-b1d0-4e27-bb62-43f60323cf8f service nova] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Detach interface failed, port_id=6cdabd2b-f665-46a9-a86e-2527cfe452bf, reason: Instance 510db409-0b4c-494a-8084-39ef3cd6c918 could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 2064.254739] env[63379]: DEBUG oslo_vmware.api [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780522, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.353666} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2064.255088] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2064.255872] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9843ff8-e945-4e0a-91e3-e64792e42f2a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.278432] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Reconfiguring VM instance instance-00000077 to attach disk [datastore1] 18498244-3385-47dd-8810-b0cc731c3966/18498244-3385-47dd-8810-b0cc731c3966.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2064.278897] env[63379]: INFO nova.compute.manager [-] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Took 1.21 seconds to deallocate network for instance. [ 2064.279136] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-023ff452-e0cf-4770-8742-b35ea8a6fcd5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.303431] env[63379]: DEBUG oslo_vmware.api [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2064.303431] env[63379]: value = "task-1780523" [ 2064.303431] env[63379]: _type = "Task" [ 2064.303431] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2064.310162] env[63379]: DEBUG oslo_vmware.api [None req-87a5bbd9-6c06-43b4-a53e-5d14e5155198 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780521, 'name': PowerOnVM_Task, 'duration_secs': 0.562368} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2064.310859] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-87a5bbd9-6c06-43b4-a53e-5d14e5155198 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Resumed the VM {{(pid=63379) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 2064.311159] env[63379]: DEBUG nova.compute.manager [None req-87a5bbd9-6c06-43b4-a53e-5d14e5155198 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2064.311991] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-183e6a3e-284d-444c-aca2-c939c4fe3bc1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.317655] env[63379]: DEBUG oslo_vmware.api [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780523, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2064.799219] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fb704f5f-e6f8-4738-881c-698e428423df tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2064.799532] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fb704f5f-e6f8-4738-881c-698e428423df tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2064.799713] env[63379]: DEBUG nova.objects.instance [None req-fb704f5f-e6f8-4738-881c-698e428423df tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lazy-loading 'resources' on Instance uuid 510db409-0b4c-494a-8084-39ef3cd6c918 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2064.814521] env[63379]: DEBUG oslo_vmware.api [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780523, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2065.315816] env[63379]: DEBUG oslo_vmware.api [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780523, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2065.377402] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9c361e5-5eb4-41f2-9b0f-7d7b20236506 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.385743] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdc9df59-31c2-472d-a407-83837a77b303 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.416568] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48d2dfa3-71b7-425f-b2d8-cca8cf6e95f7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.424229] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07604336-a592-4926-93e9-910708ad7e18 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.437559] env[63379]: DEBUG nova.compute.provider_tree [None req-fb704f5f-e6f8-4738-881c-698e428423df tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2065.726859] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b8fa3042-124f-460d-a171-7b10a4f0dda4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "0b06665f-befc-4fa3-9eef-2c2f74ba382f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2065.727170] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b8fa3042-124f-460d-a171-7b10a4f0dda4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "0b06665f-befc-4fa3-9eef-2c2f74ba382f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2065.727400] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b8fa3042-124f-460d-a171-7b10a4f0dda4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "0b06665f-befc-4fa3-9eef-2c2f74ba382f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2065.727588] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b8fa3042-124f-460d-a171-7b10a4f0dda4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "0b06665f-befc-4fa3-9eef-2c2f74ba382f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2065.727764] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b8fa3042-124f-460d-a171-7b10a4f0dda4 
tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "0b06665f-befc-4fa3-9eef-2c2f74ba382f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2065.730321] env[63379]: INFO nova.compute.manager [None req-b8fa3042-124f-460d-a171-7b10a4f0dda4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Terminating instance [ 2065.732033] env[63379]: DEBUG nova.compute.manager [None req-b8fa3042-124f-460d-a171-7b10a4f0dda4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2065.732274] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b8fa3042-124f-460d-a171-7b10a4f0dda4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2065.733105] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cffb201b-0b45-4eb6-9239-a1bf5a04cf63 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.740992] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8fa3042-124f-460d-a171-7b10a4f0dda4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2065.741239] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-20fc74db-a27a-4571-81f1-96bb339a19a1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.748286] env[63379]: DEBUG oslo_vmware.api [None req-b8fa3042-124f-460d-a171-7b10a4f0dda4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 2065.748286] env[63379]: value = "task-1780524" [ 2065.748286] env[63379]: _type = "Task" [ 2065.748286] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2065.756253] env[63379]: DEBUG oslo_vmware.api [None req-b8fa3042-124f-460d-a171-7b10a4f0dda4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780524, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2065.814203] env[63379]: DEBUG oslo_vmware.api [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780523, 'name': ReconfigVM_Task, 'duration_secs': 1.041314} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2065.814597] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Reconfigured VM instance instance-00000077 to attach disk [datastore1] 18498244-3385-47dd-8810-b0cc731c3966/18498244-3385-47dd-8810-b0cc731c3966.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2065.815093] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ba652ebc-6670-44e3-ad56-155afea72d75 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.822650] env[63379]: DEBUG oslo_vmware.api [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2065.822650] env[63379]: value = "task-1780525" [ 2065.822650] env[63379]: _type = "Task" [ 2065.822650] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2065.831801] env[63379]: DEBUG oslo_vmware.api [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780525, 'name': Rename_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2065.941241] env[63379]: DEBUG nova.scheduler.client.report [None req-fb704f5f-e6f8-4738-881c-698e428423df tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2066.259494] env[63379]: DEBUG oslo_vmware.api [None req-b8fa3042-124f-460d-a171-7b10a4f0dda4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780524, 'name': PowerOffVM_Task, 'duration_secs': 0.171971} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2066.259775] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8fa3042-124f-460d-a171-7b10a4f0dda4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2066.259957] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b8fa3042-124f-460d-a171-7b10a4f0dda4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2066.260230] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-629214ab-ef33-45af-a838-6ca754f5141e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.332487] env[63379]: DEBUG oslo_vmware.api [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780525, 'name': Rename_Task, 'duration_secs': 0.131802} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2066.332779] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2066.333040] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6806049f-9a06-463e-8920-21a1e37e33d7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.340933] env[63379]: DEBUG oslo_vmware.api [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2066.340933] env[63379]: value = "task-1780527" [ 2066.340933] env[63379]: _type = "Task" [ 2066.340933] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2066.348751] env[63379]: DEBUG oslo_vmware.api [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780527, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2066.365038] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b8fa3042-124f-460d-a171-7b10a4f0dda4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2066.365038] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b8fa3042-124f-460d-a171-7b10a4f0dda4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2066.365038] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8fa3042-124f-460d-a171-7b10a4f0dda4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Deleting the datastore file [datastore1] 0b06665f-befc-4fa3-9eef-2c2f74ba382f {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2066.365038] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d353ae08-7803-4d56-bfc0-ed45cf56aaa9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.371618] env[63379]: DEBUG oslo_vmware.api [None req-b8fa3042-124f-460d-a171-7b10a4f0dda4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for the task: (returnval){ [ 2066.371618] env[63379]: value = "task-1780528" [ 2066.371618] env[63379]: _type = "Task" [ 2066.371618] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2066.380666] env[63379]: DEBUG oslo_vmware.api [None req-b8fa3042-124f-460d-a171-7b10a4f0dda4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780528, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2066.446973] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fb704f5f-e6f8-4738-881c-698e428423df tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.647s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2066.466134] env[63379]: INFO nova.scheduler.client.report [None req-fb704f5f-e6f8-4738-881c-698e428423df tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Deleted allocations for instance 510db409-0b4c-494a-8084-39ef3cd6c918 [ 2066.851141] env[63379]: DEBUG oslo_vmware.api [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780527, 'name': PowerOnVM_Task, 'duration_secs': 0.444767} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2066.851511] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2066.851628] env[63379]: INFO nova.compute.manager [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Took 8.67 seconds to spawn the instance on the hypervisor. [ 2066.851811] env[63379]: DEBUG nova.compute.manager [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2066.852600] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aca2ec9-5d92-4604-89d6-7f169f4959c2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.881212] env[63379]: DEBUG oslo_vmware.api [None req-b8fa3042-124f-460d-a171-7b10a4f0dda4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Task: {'id': task-1780528, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.153248} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2066.881466] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8fa3042-124f-460d-a171-7b10a4f0dda4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2066.881658] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b8fa3042-124f-460d-a171-7b10a4f0dda4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2066.881843] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-b8fa3042-124f-460d-a171-7b10a4f0dda4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2066.882038] env[63379]: INFO nova.compute.manager [None req-b8fa3042-124f-460d-a171-7b10a4f0dda4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Took 1.15 seconds to destroy the instance on the hypervisor. [ 2066.882292] env[63379]: DEBUG oslo.service.loopingcall [None req-b8fa3042-124f-460d-a171-7b10a4f0dda4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2066.882487] env[63379]: DEBUG nova.compute.manager [-] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2066.882585] env[63379]: DEBUG nova.network.neutron [-] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2066.973741] env[63379]: DEBUG oslo_concurrency.lockutils [None req-fb704f5f-e6f8-4738-881c-698e428423df tempest-ServerActionsTestOtherA-156498572 tempest-ServerActionsTestOtherA-156498572-project-member] Lock "510db409-0b4c-494a-8084-39ef3cd6c918" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.060s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2067.376264] env[63379]: INFO nova.compute.manager [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Took 13.41 seconds to build instance. [ 2067.395199] env[63379]: DEBUG nova.compute.manager [req-76ff17aa-8d61-4020-bfc8-4653cf6ae6ee req-2b1bac4f-1ebd-4fb0-9240-5eb4ed960129 service nova] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Received event network-vif-deleted-61782886-48c0-44e0-a33b-122b4323cfe0 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2067.395424] env[63379]: INFO nova.compute.manager [req-76ff17aa-8d61-4020-bfc8-4653cf6ae6ee req-2b1bac4f-1ebd-4fb0-9240-5eb4ed960129 service nova] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Neutron deleted interface 61782886-48c0-44e0-a33b-122b4323cfe0; detaching it from the instance and deleting it from the info cache [ 2067.395601] env[63379]: DEBUG nova.network.neutron [req-76ff17aa-8d61-4020-bfc8-4653cf6ae6ee req-2b1bac4f-1ebd-4fb0-9240-5eb4ed960129 service nova] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2067.875414] env[63379]: DEBUG nova.network.neutron [-] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2067.883066] env[63379]: DEBUG oslo_concurrency.lockutils [None req-38af47ff-06f9-4e04-9a74-3825c7e11d82 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "18498244-3385-47dd-8810-b0cc731c3966" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.927s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2067.899149] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b98d27f1-dbfe-4c7b-a3bb-b725685c5d06 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.917570] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a65eba8b-ffc0-4290-95c5-66ae2d1d36dc {{(pid=63379) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.950812] env[63379]: DEBUG nova.compute.manager [req-76ff17aa-8d61-4020-bfc8-4653cf6ae6ee req-2b1bac4f-1ebd-4fb0-9240-5eb4ed960129 service nova] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Detach interface failed, port_id=61782886-48c0-44e0-a33b-122b4323cfe0, reason: Instance 0b06665f-befc-4fa3-9eef-2c2f74ba382f could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 2068.383377] env[63379]: INFO nova.compute.manager [-] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Took 1.50 seconds to deallocate network for instance. [ 2068.887638] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b8fa3042-124f-460d-a171-7b10a4f0dda4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2068.887931] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b8fa3042-124f-460d-a171-7b10a4f0dda4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2068.888180] env[63379]: DEBUG nova.objects.instance [None req-b8fa3042-124f-460d-a171-7b10a4f0dda4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lazy-loading 'resources' on Instance uuid 0b06665f-befc-4fa3-9eef-2c2f74ba382f {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2068.961881] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "d7bd6d27-b949-42a8-b191-59a82e8624a6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2068.962135] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "d7bd6d27-b949-42a8-b191-59a82e8624a6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2069.464052] env[63379]: DEBUG nova.compute.manager [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Starting instance... 
{{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2069.471204] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7821bab2-48dc-4749-b4eb-20cd596a3fc3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.479949] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d71b6417-df1d-4c19-b798-d817c94224d8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.511595] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-778f2ba4-d4cf-43cf-9121-4dff49bf272d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.520488] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f774f26-4f49-4623-a407-4eea5318025d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.536981] env[63379]: DEBUG nova.compute.provider_tree [None req-b8fa3042-124f-460d-a171-7b10a4f0dda4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2069.987352] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2070.040070] env[63379]: DEBUG nova.scheduler.client.report [None req-b8fa3042-124f-460d-a171-7b10a4f0dda4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2070.545110] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b8fa3042-124f-460d-a171-7b10a4f0dda4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.657s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2070.548015] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.561s {{(pid=63379) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2070.549728] env[63379]: INFO nova.compute.claims [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2070.567986] env[63379]: INFO nova.scheduler.client.report [None req-b8fa3042-124f-460d-a171-7b10a4f0dda4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Deleted allocations for instance 0b06665f-befc-4fa3-9eef-2c2f74ba382f [ 2071.078785] env[63379]: DEBUG oslo_concurrency.lockutils [None req-b8fa3042-124f-460d-a171-7b10a4f0dda4 tempest-ServerActionsTestJSON-1145277061 tempest-ServerActionsTestJSON-1145277061-project-member] Lock "0b06665f-befc-4fa3-9eef-2c2f74ba382f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.351s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2071.622867] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dde38015-6952-4eb8-9f36-5c0783f63ad6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.631283] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a14ee358-1dc0-4b5e-8ffd-423d5438dc3b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.661798] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-359231b1-36e8-4400-a1b5-ee30ea7a6431 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.669644] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c968bb2-0338-484d-9503-b9703f790ae6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.684578] env[63379]: DEBUG nova.compute.provider_tree [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2072.187436] env[63379]: DEBUG nova.scheduler.client.report [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2072.693162] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "compute_resources" 
"released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.145s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2072.693451] env[63379]: DEBUG nova.compute.manager [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2073.199064] env[63379]: DEBUG nova.compute.utils [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2073.200215] env[63379]: DEBUG nova.compute.manager [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2073.200424] env[63379]: DEBUG nova.network.neutron [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2073.246725] env[63379]: DEBUG nova.policy [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f2e7c2125f0044508dc4016c4de224e2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9746ae945355479fa5880802e08d2b0a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 2073.494556] env[63379]: DEBUG nova.network.neutron [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Successfully created port: 9e983951-c3d6-4a3e-8a8d-e67fb940734b {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2073.704152] env[63379]: DEBUG nova.compute.manager [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2074.713508] env[63379]: DEBUG nova.compute.manager [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2074.740022] env[63379]: DEBUG nova.virt.hardware [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2074.740314] env[63379]: DEBUG nova.virt.hardware [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2074.740480] env[63379]: DEBUG nova.virt.hardware [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2074.740668] env[63379]: DEBUG nova.virt.hardware [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2074.740821] env[63379]: DEBUG nova.virt.hardware [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2074.740975] env[63379]: DEBUG nova.virt.hardware [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2074.741218] env[63379]: DEBUG nova.virt.hardware [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2074.741381] env[63379]: DEBUG nova.virt.hardware [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2074.741555] env[63379]: DEBUG nova.virt.hardware [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 
tempest-ServersTestJSON-1933653091-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2074.741725] env[63379]: DEBUG nova.virt.hardware [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2074.741909] env[63379]: DEBUG nova.virt.hardware [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2074.742815] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-307e548e-10ba-4c82-a3d8-feac63e2ed1e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.751254] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32fbea83-4c07-4e39-bb6e-e65de91a8db5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.131873] env[63379]: DEBUG nova.compute.manager [req-ded32b35-b53c-4dbc-9aee-b1c585ac8225 req-69885b69-3951-4a72-b2a8-9d601ce68aaa service nova] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Received event network-vif-plugged-9e983951-c3d6-4a3e-8a8d-e67fb940734b {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2075.133602] env[63379]: DEBUG oslo_concurrency.lockutils [req-ded32b35-b53c-4dbc-9aee-b1c585ac8225 req-69885b69-3951-4a72-b2a8-9d601ce68aaa service nova] Acquiring lock "d7bd6d27-b949-42a8-b191-59a82e8624a6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2075.133871] env[63379]: DEBUG oslo_concurrency.lockutils [req-ded32b35-b53c-4dbc-9aee-b1c585ac8225 req-69885b69-3951-4a72-b2a8-9d601ce68aaa service nova] Lock "d7bd6d27-b949-42a8-b191-59a82e8624a6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2075.134106] env[63379]: DEBUG oslo_concurrency.lockutils [req-ded32b35-b53c-4dbc-9aee-b1c585ac8225 req-69885b69-3951-4a72-b2a8-9d601ce68aaa service nova] Lock "d7bd6d27-b949-42a8-b191-59a82e8624a6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2075.134329] env[63379]: DEBUG nova.compute.manager [req-ded32b35-b53c-4dbc-9aee-b1c585ac8225 req-69885b69-3951-4a72-b2a8-9d601ce68aaa service nova] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] No waiting events found dispatching network-vif-plugged-9e983951-c3d6-4a3e-8a8d-e67fb940734b {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2075.134547] env[63379]: WARNING nova.compute.manager [req-ded32b35-b53c-4dbc-9aee-b1c585ac8225 req-69885b69-3951-4a72-b2a8-9d601ce68aaa service nova] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] 
Received unexpected event network-vif-plugged-9e983951-c3d6-4a3e-8a8d-e67fb940734b for instance with vm_state building and task_state spawning. [ 2075.233437] env[63379]: DEBUG nova.network.neutron [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Successfully updated port: 9e983951-c3d6-4a3e-8a8d-e67fb940734b {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2075.737059] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "refresh_cache-d7bd6d27-b949-42a8-b191-59a82e8624a6" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2075.737059] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquired lock "refresh_cache-d7bd6d27-b949-42a8-b191-59a82e8624a6" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2075.737059] env[63379]: DEBUG nova.network.neutron [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2075.963964] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager.update_available_resource {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2076.268529] env[63379]: DEBUG nova.network.neutron [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Instance cache missing network info. 
{{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2076.388250] env[63379]: DEBUG nova.network.neutron [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Updating instance_info_cache with network_info: [{"id": "9e983951-c3d6-4a3e-8a8d-e67fb940734b", "address": "fa:16:3e:36:14:f0", "network": {"id": "13b14fc1-6384-47ab-b623-f48d1ef0c41e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1646386679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9746ae945355479fa5880802e08d2b0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16c6ea68-9b0e-4ac0-a484-7a9a40533017", "external-id": "nsx-vlan-transportzone-384", "segmentation_id": 384, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e983951-c3", "ovs_interfaceid": "9e983951-c3d6-4a3e-8a8d-e67fb940734b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2076.466801] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2076.467040] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2076.467187] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2076.467344] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63379) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2076.468247] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d199c24-e506-4f72-ba32-78de5ee4dc26 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.476845] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8a731e4-9134-4967-a4e6-3c2d11e88896 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
2076.490576] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66f8bc7c-5235-45ac-8a8a-60043cf466e4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.496853] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a2e3d45-ba0b-42c5-99c0-171a7e62a475 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.524584] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180217MB free_disk=164GB free_vcpus=48 pci_devices=None {{(pid=63379) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2076.524734] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2076.524922] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2076.891084] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Releasing lock "refresh_cache-d7bd6d27-b949-42a8-b191-59a82e8624a6" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2076.891475] env[63379]: DEBUG nova.compute.manager [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Instance network_info: |[{"id": "9e983951-c3d6-4a3e-8a8d-e67fb940734b", "address": "fa:16:3e:36:14:f0", "network": {"id": "13b14fc1-6384-47ab-b623-f48d1ef0c41e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1646386679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9746ae945355479fa5880802e08d2b0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16c6ea68-9b0e-4ac0-a484-7a9a40533017", "external-id": "nsx-vlan-transportzone-384", "segmentation_id": 384, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e983951-c3", "ovs_interfaceid": "9e983951-c3d6-4a3e-8a8d-e67fb940734b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2076.891898] env[63379]: DEBUG nova.virt.vmwareapi.vmops 
[None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:36:14:f0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '16c6ea68-9b0e-4ac0-a484-7a9a40533017', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9e983951-c3d6-4a3e-8a8d-e67fb940734b', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2076.899443] env[63379]: DEBUG oslo.service.loopingcall [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2076.899645] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2076.900337] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-63c3025d-d57c-492c-9e66-7a83a434e9c5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.919995] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2076.919995] env[63379]: value = "task-1780529" [ 2076.919995] env[63379]: _type = "Task" [ 2076.919995] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2076.927603] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780529, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2077.162180] env[63379]: DEBUG nova.compute.manager [req-112fd1ca-5736-4143-9199-133f4ba83630 req-88f5d67f-45f8-4aac-bbaa-243d5ac7332b service nova] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Received event network-changed-9e983951-c3d6-4a3e-8a8d-e67fb940734b {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2077.162444] env[63379]: DEBUG nova.compute.manager [req-112fd1ca-5736-4143-9199-133f4ba83630 req-88f5d67f-45f8-4aac-bbaa-243d5ac7332b service nova] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Refreshing instance network info cache due to event network-changed-9e983951-c3d6-4a3e-8a8d-e67fb940734b. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 2077.162694] env[63379]: DEBUG oslo_concurrency.lockutils [req-112fd1ca-5736-4143-9199-133f4ba83630 req-88f5d67f-45f8-4aac-bbaa-243d5ac7332b service nova] Acquiring lock "refresh_cache-d7bd6d27-b949-42a8-b191-59a82e8624a6" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2077.162855] env[63379]: DEBUG oslo_concurrency.lockutils [req-112fd1ca-5736-4143-9199-133f4ba83630 req-88f5d67f-45f8-4aac-bbaa-243d5ac7332b service nova] Acquired lock "refresh_cache-d7bd6d27-b949-42a8-b191-59a82e8624a6" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2077.163038] env[63379]: DEBUG nova.network.neutron [req-112fd1ca-5736-4143-9199-133f4ba83630 req-88f5d67f-45f8-4aac-bbaa-243d5ac7332b service nova] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Refreshing network info cache for port 9e983951-c3d6-4a3e-8a8d-e67fb940734b {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2077.430866] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780529, 'name': CreateVM_Task, 'duration_secs': 0.339512} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2077.431056] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2077.431917] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2077.432108] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2077.432434] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2077.432693] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ed2a6c8-c0cc-4150-9127-c247bf6169e2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.437697] env[63379]: DEBUG oslo_vmware.api [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2077.437697] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]527d7056-add6-639e-f6ee-d54643b4fd13" [ 2077.437697] env[63379]: _type = "Task" [ 2077.437697] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2077.445852] env[63379]: DEBUG oslo_vmware.api [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]527d7056-add6-639e-f6ee-d54643b4fd13, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2077.552077] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 85ecb409-ab53-43d9-8120-2f8c7402d74c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2077.552250] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 7f0c426b-1ce3-469f-8ee1-6dd2178f014e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2077.552374] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 18498244-3385-47dd-8810-b0cc731c3966 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2077.552493] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance d7bd6d27-b949-42a8-b191-59a82e8624a6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2077.552674] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2077.552812] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2077.605676] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81bc0b0c-96ba-4605-8fe0-e1ca194958b4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.613310] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1724a52-7242-4e3e-9685-61894b4d839a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.642372] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-227a9c91-1ce0-483c-adf4-19bc9079f063 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.649413] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5935001a-89d6-4b2e-9303-ad8be5981d41 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.662257] env[63379]: DEBUG nova.compute.provider_tree [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2077.843896] env[63379]: DEBUG nova.network.neutron [req-112fd1ca-5736-4143-9199-133f4ba83630 req-88f5d67f-45f8-4aac-bbaa-243d5ac7332b service nova] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Updated VIF entry in instance network info cache for port 9e983951-c3d6-4a3e-8a8d-e67fb940734b. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2077.844272] env[63379]: DEBUG nova.network.neutron [req-112fd1ca-5736-4143-9199-133f4ba83630 req-88f5d67f-45f8-4aac-bbaa-243d5ac7332b service nova] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Updating instance_info_cache with network_info: [{"id": "9e983951-c3d6-4a3e-8a8d-e67fb940734b", "address": "fa:16:3e:36:14:f0", "network": {"id": "13b14fc1-6384-47ab-b623-f48d1ef0c41e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1646386679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9746ae945355479fa5880802e08d2b0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16c6ea68-9b0e-4ac0-a484-7a9a40533017", "external-id": "nsx-vlan-transportzone-384", "segmentation_id": 384, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e983951-c3", "ovs_interfaceid": "9e983951-c3d6-4a3e-8a8d-e67fb940734b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2077.948757] env[63379]: DEBUG oslo_vmware.api [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]527d7056-add6-639e-f6ee-d54643b4fd13, 'name': SearchDatastore_Task, 'duration_secs': 0.011902} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2077.949169] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2077.949284] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2077.949527] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2077.949679] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2077.949861] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2077.950162] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a57ec3e8-228b-4f81-a30d-ace18ffc8a39 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.958415] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2077.958590] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2077.959309] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0951d02-2373-42e8-a732-e2f6bfd22125 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.966493] env[63379]: DEBUG oslo_vmware.api [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2077.966493] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]529ef9b7-4b8d-74e7-14a6-f2cf4b89a51d" [ 2077.966493] env[63379]: _type = "Task" [ 2077.966493] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2077.974288] env[63379]: DEBUG oslo_vmware.api [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]529ef9b7-4b8d-74e7-14a6-f2cf4b89a51d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2078.164954] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2078.347172] env[63379]: DEBUG oslo_concurrency.lockutils [req-112fd1ca-5736-4143-9199-133f4ba83630 req-88f5d67f-45f8-4aac-bbaa-243d5ac7332b service nova] Releasing lock "refresh_cache-d7bd6d27-b949-42a8-b191-59a82e8624a6" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2078.477711] env[63379]: DEBUG oslo_vmware.api [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]529ef9b7-4b8d-74e7-14a6-f2cf4b89a51d, 'name': SearchDatastore_Task, 'duration_secs': 0.008666} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2078.478047] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1674bdf-701f-4b53-96fa-52d8c3492001 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.483324] env[63379]: DEBUG oslo_vmware.api [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2078.483324] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d630a1-d459-ab04-89ff-1d8aeff60d64" [ 2078.483324] env[63379]: _type = "Task" [ 2078.483324] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2078.491092] env[63379]: DEBUG oslo_vmware.api [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d630a1-d459-ab04-89ff-1d8aeff60d64, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2078.669327] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63379) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2078.669517] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.145s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2078.994415] env[63379]: DEBUG oslo_vmware.api [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52d630a1-d459-ab04-89ff-1d8aeff60d64, 'name': SearchDatastore_Task, 'duration_secs': 0.008626} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2078.994746] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2078.994931] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] d7bd6d27-b949-42a8-b191-59a82e8624a6/d7bd6d27-b949-42a8-b191-59a82e8624a6.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2078.995210] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3d4ddeb9-507f-46ae-85ec-bfbd1cd35565 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.003497] env[63379]: DEBUG oslo_vmware.api [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2079.003497] env[63379]: value = "task-1780530" [ 2079.003497] env[63379]: _type = "Task" [ 2079.003497] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2079.011097] env[63379]: DEBUG oslo_vmware.api [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780530, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2079.514707] env[63379]: DEBUG oslo_vmware.api [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780530, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2079.665717] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2079.666150] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2079.666460] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2079.666737] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2079.666995] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63379) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10593}} [ 2080.018776] env[63379]: DEBUG oslo_vmware.api [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780530, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2080.515567] env[63379]: DEBUG oslo_vmware.api [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780530, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2081.016838] env[63379]: DEBUG oslo_vmware.api [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780530, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2081.517693] env[63379]: DEBUG oslo_vmware.api [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780530, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2082.019493] env[63379]: DEBUG oslo_vmware.api [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780530, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2082.519625] env[63379]: DEBUG oslo_vmware.api [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780530, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2083.020773] env[63379]: DEBUG oslo_vmware.api [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780530, 'name': CopyVirtualDisk_Task, 'duration_secs': 3.673783} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2083.021078] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] d7bd6d27-b949-42a8-b191-59a82e8624a6/d7bd6d27-b949-42a8-b191-59a82e8624a6.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2083.021291] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2083.021547] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-41b2c90c-3f26-4a29-899f-383f6dc31e90 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.028100] env[63379]: DEBUG oslo_vmware.api [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2083.028100] env[63379]: value = "task-1780531" [ 2083.028100] env[63379]: _type = "Task" [ 2083.028100] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2083.036605] env[63379]: DEBUG oslo_vmware.api [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780531, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2083.537896] env[63379]: DEBUG oslo_vmware.api [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780531, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061162} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2083.538338] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2083.539101] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0647d403-7c7a-4f7f-a81c-370b59d4534c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.562152] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Reconfiguring VM instance instance-00000078 to attach disk [datastore1] d7bd6d27-b949-42a8-b191-59a82e8624a6/d7bd6d27-b949-42a8-b191-59a82e8624a6.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2083.562476] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-637f35c1-b8f1-4c28-9fd8-8cf2d06faad7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.581948] env[63379]: DEBUG oslo_vmware.api [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2083.581948] env[63379]: value = "task-1780532" [ 2083.581948] env[63379]: _type = "Task" [ 2083.581948] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2083.590308] env[63379]: DEBUG oslo_vmware.api [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780532, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2083.965348] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2083.965531] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Starting heal instance info cache {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9974}} [ 2084.092360] env[63379]: DEBUG oslo_vmware.api [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780532, 'name': ReconfigVM_Task, 'duration_secs': 0.478932} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2084.092607] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Reconfigured VM instance instance-00000078 to attach disk [datastore1] d7bd6d27-b949-42a8-b191-59a82e8624a6/d7bd6d27-b949-42a8-b191-59a82e8624a6.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2084.093259] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-14261036-b666-41a0-b30b-3026728a2ee5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.099435] env[63379]: DEBUG oslo_vmware.api [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2084.099435] env[63379]: value = "task-1780533" [ 2084.099435] env[63379]: _type = "Task" [ 2084.099435] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2084.108020] env[63379]: DEBUG oslo_vmware.api [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780533, 'name': Rename_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2084.609424] env[63379]: DEBUG oslo_vmware.api [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780533, 'name': Rename_Task, 'duration_secs': 0.302242} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2084.609858] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2084.610054] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e5d5bd5e-3fc5-468f-b73a-67008da373bd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.617438] env[63379]: DEBUG oslo_vmware.api [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2084.617438] env[63379]: value = "task-1780534" [ 2084.617438] env[63379]: _type = "Task" [ 2084.617438] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2084.624805] env[63379]: DEBUG oslo_vmware.api [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780534, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2085.127091] env[63379]: DEBUG oslo_vmware.api [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780534, 'name': PowerOnVM_Task, 'duration_secs': 0.444766} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2085.127386] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2085.127594] env[63379]: INFO nova.compute.manager [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Took 10.41 seconds to spawn the instance on the hypervisor. [ 2085.127780] env[63379]: DEBUG nova.compute.manager [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2085.128614] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6ca6295-e96d-40e1-a8fc-95473d4b78bb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.645397] env[63379]: INFO nova.compute.manager [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Took 15.68 seconds to build instance. 
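[editor's note] The spawn of instance d7bd6d27-b949-42a8-b191-59a82e8624a6 recorded above follows one repeating pattern: each vCenter call (CreateVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) returns a task object that the driver polls until it reaches a terminal state, which is what produces the repeated "progress is N%" DEBUG lines. The sketch below is purely illustrative and is not the oslo.vmware implementation; invoke_task and get_task_info are hypothetical helpers standing in for the real SOAP calls, and only the ordering and the success/error terminal states are taken from the log.

    # Illustrative sketch only -- mirrors the task-polling pattern in the log,
    # not the real oslo.vmware code. invoke_task/get_task_info are hypothetical.
    import time

    POLL_INTERVAL = 0.5  # seconds between polls (cf. oslo.vmware's task_poll_interval)

    def wait_for_task(task, get_task_info, poll_interval=POLL_INTERVAL):
        """Poll a vCenter task until it succeeds or fails."""
        while True:
            state, progress = get_task_info(task)   # e.g. ('running', 25)
            if state == 'success':
                return
            if state == 'error':
                raise RuntimeError('task %s failed' % task)
            # Corresponds to the repeated "progress is N%" DEBUG lines above.
            print('Task %s progress is %s%%' % (task, progress))
            time.sleep(poll_interval)

    def spawn(invoke_task, get_task_info):
        """Drive the spawn steps in the order the log records them."""
        for step in ('CreateVM_Task', 'CopyVirtualDisk_Task',
                     'ExtendVirtualDisk_Task', 'ReconfigVM_Task',
                     'Rename_Task', 'PowerOnVM_Task'):
            task = invoke_task(step)
            wait_for_task(task, get_task_info)

In the actual run this role is played by wait_for_task in oslo_vmware/api.py (the {{(pid=63379) wait_for_task ... api.py:397}} and _poll_task ... api.py:434 references above); the sketch only restates the loop structure visible in the trace.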
[ 2086.147233] env[63379]: DEBUG oslo_concurrency.lockutils [None req-e8b09c93-2d52-4478-a5ce-861d41f6ec94 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "d7bd6d27-b949-42a8-b191-59a82e8624a6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.185s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2087.006987] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a434f63c-8eeb-4c9b-b44f-43deaa17531b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "d7bd6d27-b949-42a8-b191-59a82e8624a6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2087.007374] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a434f63c-8eeb-4c9b-b44f-43deaa17531b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "d7bd6d27-b949-42a8-b191-59a82e8624a6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2087.007609] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a434f63c-8eeb-4c9b-b44f-43deaa17531b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "d7bd6d27-b949-42a8-b191-59a82e8624a6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2087.007708] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a434f63c-8eeb-4c9b-b44f-43deaa17531b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "d7bd6d27-b949-42a8-b191-59a82e8624a6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2087.007884] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a434f63c-8eeb-4c9b-b44f-43deaa17531b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "d7bd6d27-b949-42a8-b191-59a82e8624a6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2087.010038] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "refresh_cache-85ecb409-ab53-43d9-8120-2f8c7402d74c" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2087.010188] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquired lock "refresh_cache-85ecb409-ab53-43d9-8120-2f8c7402d74c" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2087.010330] env[63379]: DEBUG nova.network.neutron [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Forcefully refreshing network info cache for instance {{(pid=63379) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2061}} [ 2087.011575] env[63379]: INFO nova.compute.manager [None req-a434f63c-8eeb-4c9b-b44f-43deaa17531b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Terminating instance [ 2087.013188] env[63379]: DEBUG nova.compute.manager [None req-a434f63c-8eeb-4c9b-b44f-43deaa17531b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2087.013681] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a434f63c-8eeb-4c9b-b44f-43deaa17531b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2087.014231] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbb7a376-87f1-4b62-b50e-a43ba16780e7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.022156] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-a434f63c-8eeb-4c9b-b44f-43deaa17531b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2087.022380] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d6ccce19-543a-420f-a14b-c2d0cf5d2e78 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.028737] env[63379]: DEBUG oslo_vmware.api [None req-a434f63c-8eeb-4c9b-b44f-43deaa17531b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2087.028737] env[63379]: value = "task-1780535" [ 2087.028737] env[63379]: _type = "Task" [ 2087.028737] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2087.036802] env[63379]: DEBUG oslo_vmware.api [None req-a434f63c-8eeb-4c9b-b44f-43deaa17531b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780535, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2087.538445] env[63379]: DEBUG oslo_vmware.api [None req-a434f63c-8eeb-4c9b-b44f-43deaa17531b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780535, 'name': PowerOffVM_Task, 'duration_secs': 0.187174} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2087.538717] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-a434f63c-8eeb-4c9b-b44f-43deaa17531b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2087.538885] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a434f63c-8eeb-4c9b-b44f-43deaa17531b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2087.539255] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cfd07259-e944-4e7a-9709-80bccb69eb03 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.665288] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a434f63c-8eeb-4c9b-b44f-43deaa17531b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2087.665531] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a434f63c-8eeb-4c9b-b44f-43deaa17531b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2087.665720] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-a434f63c-8eeb-4c9b-b44f-43deaa17531b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Deleting the datastore file [datastore1] d7bd6d27-b949-42a8-b191-59a82e8624a6 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2087.666035] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5d81f805-1833-48ed-8997-25850c5f5aa7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.672154] env[63379]: DEBUG oslo_vmware.api [None req-a434f63c-8eeb-4c9b-b44f-43deaa17531b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2087.672154] env[63379]: value = "task-1780537" [ 2087.672154] env[63379]: _type = "Task" [ 2087.672154] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2087.679969] env[63379]: DEBUG oslo_vmware.api [None req-a434f63c-8eeb-4c9b-b44f-43deaa17531b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780537, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2088.189917] env[63379]: DEBUG oslo_vmware.api [None req-a434f63c-8eeb-4c9b-b44f-43deaa17531b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780537, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.13224} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2088.189917] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-a434f63c-8eeb-4c9b-b44f-43deaa17531b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2088.190344] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a434f63c-8eeb-4c9b-b44f-43deaa17531b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2088.190344] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-a434f63c-8eeb-4c9b-b44f-43deaa17531b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2088.190438] env[63379]: INFO nova.compute.manager [None req-a434f63c-8eeb-4c9b-b44f-43deaa17531b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Took 1.18 seconds to destroy the instance on the hypervisor. [ 2088.191452] env[63379]: DEBUG oslo.service.loopingcall [None req-a434f63c-8eeb-4c9b-b44f-43deaa17531b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2088.191452] env[63379]: DEBUG nova.compute.manager [-] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2088.191452] env[63379]: DEBUG nova.network.neutron [-] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2088.229884] env[63379]: DEBUG nova.network.neutron [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Updating instance_info_cache with network_info: [{"id": "ef74ed84-a494-4ce8-a037-458fd0285f2b", "address": "fa:16:3e:91:bd:3a", "network": {"id": "13b14fc1-6384-47ab-b623-f48d1ef0c41e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1646386679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9746ae945355479fa5880802e08d2b0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16c6ea68-9b0e-4ac0-a484-7a9a40533017", "external-id": "nsx-vlan-transportzone-384", "segmentation_id": 384, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef74ed84-a4", "ovs_interfaceid": "ef74ed84-a494-4ce8-a037-458fd0285f2b", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2088.517661] env[63379]: DEBUG nova.compute.manager [req-ec1c0172-940d-49b4-bd21-625b5700aced req-204f1cb7-db2b-4d72-a45c-814aa32cc535 service nova] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Received event network-vif-deleted-9e983951-c3d6-4a3e-8a8d-e67fb940734b {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2088.517995] env[63379]: INFO nova.compute.manager [req-ec1c0172-940d-49b4-bd21-625b5700aced req-204f1cb7-db2b-4d72-a45c-814aa32cc535 service nova] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Neutron deleted interface 9e983951-c3d6-4a3e-8a8d-e67fb940734b; detaching it from the instance and deleting it from the info cache [ 2088.518099] env[63379]: DEBUG nova.network.neutron [req-ec1c0172-940d-49b4-bd21-625b5700aced req-204f1cb7-db2b-4d72-a45c-814aa32cc535 service nova] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2088.735861] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Releasing lock "refresh_cache-85ecb409-ab53-43d9-8120-2f8c7402d74c" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2088.736097] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Updated the network info_cache for instance {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10045}} [ 2088.736308] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2088.736471] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2088.736628] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2088.994011] env[63379]: DEBUG nova.network.neutron [-] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2089.020489] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ceed08b9-2776-494d-b33b-2e864ad7ded4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2089.030154] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53456154-546f-4b5b-bb50-5aa9de33e3bf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2089.056915] env[63379]: DEBUG nova.compute.manager 
[req-ec1c0172-940d-49b4-bd21-625b5700aced req-204f1cb7-db2b-4d72-a45c-814aa32cc535 service nova] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Detach interface failed, port_id=9e983951-c3d6-4a3e-8a8d-e67fb940734b, reason: Instance d7bd6d27-b949-42a8-b191-59a82e8624a6 could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 2089.496648] env[63379]: INFO nova.compute.manager [-] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Took 1.31 seconds to deallocate network for instance. [ 2090.003093] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a434f63c-8eeb-4c9b-b44f-43deaa17531b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2090.003093] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a434f63c-8eeb-4c9b-b44f-43deaa17531b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2090.003356] env[63379]: DEBUG nova.objects.instance [None req-a434f63c-8eeb-4c9b-b44f-43deaa17531b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lazy-loading 'resources' on Instance uuid d7bd6d27-b949-42a8-b191-59a82e8624a6 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2090.595255] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc5f1fce-797f-47ea-92de-8c151c81c6af {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.603896] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-addd0e3e-aec1-4553-bd33-07da9f943f04 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.633215] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acdc6b09-4bf0-4c93-9fdf-f3703ef94b04 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.641136] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff6e136c-e848-4819-b06e-8e8272a3ec36 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.657399] env[63379]: DEBUG nova.compute.provider_tree [None req-a434f63c-8eeb-4c9b-b44f-43deaa17531b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2091.046877] env[63379]: DEBUG oslo_concurrency.lockutils [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "7f0c426b-1ce3-469f-8ee1-6dd2178f014e" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=63379) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2091.047177] env[63379]: DEBUG oslo_concurrency.lockutils [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "7f0c426b-1ce3-469f-8ee1-6dd2178f014e" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2091.047368] env[63379]: INFO nova.compute.manager [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Shelving [ 2091.161833] env[63379]: DEBUG nova.scheduler.client.report [None req-a434f63c-8eeb-4c9b-b44f-43deaa17531b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2091.554337] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2091.554612] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4af7ccf9-58d5-43c6-af8c-dfc3df5c38ea {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.561853] env[63379]: DEBUG oslo_vmware.api [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 2091.561853] env[63379]: value = "task-1780538" [ 2091.561853] env[63379]: _type = "Task" [ 2091.561853] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2091.569249] env[63379]: DEBUG oslo_vmware.api [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780538, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2091.667049] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a434f63c-8eeb-4c9b-b44f-43deaa17531b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.664s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2091.708192] env[63379]: INFO nova.scheduler.client.report [None req-a434f63c-8eeb-4c9b-b44f-43deaa17531b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Deleted allocations for instance d7bd6d27-b949-42a8-b191-59a82e8624a6 [ 2091.964666] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2092.071948] env[63379]: DEBUG oslo_vmware.api [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780538, 'name': PowerOffVM_Task, 'duration_secs': 0.192969} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2092.072225] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2092.072983] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23098635-5730-4039-8a6e-e312144bd607 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.091689] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9b35d43-5c21-426a-98c5-185c232ec8ae {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.215662] env[63379]: DEBUG oslo_concurrency.lockutils [None req-a434f63c-8eeb-4c9b-b44f-43deaa17531b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "d7bd6d27-b949-42a8-b191-59a82e8624a6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.208s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2092.598305] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0758f7fa-08e7-4d05-8ed5-1f4573159292 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "18498244-3385-47dd-8810-b0cc731c3966" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2092.598672] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0758f7fa-08e7-4d05-8ed5-1f4573159292 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "18498244-3385-47dd-8810-b0cc731c3966" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2092.598913] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0758f7fa-08e7-4d05-8ed5-1f4573159292 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "18498244-3385-47dd-8810-b0cc731c3966-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2092.599189] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0758f7fa-08e7-4d05-8ed5-1f4573159292 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "18498244-3385-47dd-8810-b0cc731c3966-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2092.599349] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0758f7fa-08e7-4d05-8ed5-1f4573159292 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "18498244-3385-47dd-8810-b0cc731c3966-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2092.601385] env[63379]: INFO nova.compute.manager [None req-0758f7fa-08e7-4d05-8ed5-1f4573159292 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Terminating instance [ 2092.603091] env[63379]: DEBUG nova.compute.manager [None req-0758f7fa-08e7-4d05-8ed5-1f4573159292 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2092.603295] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0758f7fa-08e7-4d05-8ed5-1f4573159292 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2092.604246] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Creating Snapshot of the VM instance {{(pid=63379) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2092.605054] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55b783c9-8fb7-484d-b7b8-d2cb352e72c7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.609485] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-ec1d3747-0e3f-4988-98ba-ce8a7c43cc63 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.616237] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-0758f7fa-08e7-4d05-8ed5-1f4573159292 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2092.617349] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aea1ef16-2967-4576-a373-142e5aa1b4ad {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.618747] env[63379]: DEBUG oslo_vmware.api [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 2092.618747] env[63379]: value = "task-1780539" [ 2092.618747] env[63379]: _type = "Task" [ 2092.618747] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2092.622912] env[63379]: DEBUG oslo_vmware.api [None req-0758f7fa-08e7-4d05-8ed5-1f4573159292 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2092.622912] env[63379]: value = "task-1780540" [ 2092.622912] env[63379]: _type = "Task" [ 2092.622912] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2092.628290] env[63379]: DEBUG oslo_vmware.api [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780539, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2092.633263] env[63379]: DEBUG oslo_vmware.api [None req-0758f7fa-08e7-4d05-8ed5-1f4573159292 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780540, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2093.131063] env[63379]: DEBUG oslo_vmware.api [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780539, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2093.136448] env[63379]: DEBUG oslo_vmware.api [None req-0758f7fa-08e7-4d05-8ed5-1f4573159292 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780540, 'name': PowerOffVM_Task, 'duration_secs': 0.189308} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2093.136709] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-0758f7fa-08e7-4d05-8ed5-1f4573159292 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2093.136881] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0758f7fa-08e7-4d05-8ed5-1f4573159292 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2093.137167] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-38ddbf52-0cbe-4af1-900c-97afe5dbd05b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.376128] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0758f7fa-08e7-4d05-8ed5-1f4573159292 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2093.376380] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0758f7fa-08e7-4d05-8ed5-1f4573159292 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2093.376574] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-0758f7fa-08e7-4d05-8ed5-1f4573159292 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Deleting the datastore file [datastore1] 18498244-3385-47dd-8810-b0cc731c3966 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2093.376856] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-92de1471-bfd5-4078-853a-8c87f0577cef {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.383479] env[63379]: DEBUG oslo_vmware.api [None req-0758f7fa-08e7-4d05-8ed5-1f4573159292 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2093.383479] env[63379]: value = "task-1780542" [ 2093.383479] env[63379]: _type = "Task" [ 2093.383479] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2093.391167] env[63379]: DEBUG oslo_vmware.api [None req-0758f7fa-08e7-4d05-8ed5-1f4573159292 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780542, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2093.628997] env[63379]: DEBUG oslo_vmware.api [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780539, 'name': CreateSnapshot_Task, 'duration_secs': 0.606758} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2093.629368] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Created Snapshot of the VM instance {{(pid=63379) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2093.630110] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccd4035b-0bd5-4677-9410-821afdf04012 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.893743] env[63379]: DEBUG oslo_vmware.api [None req-0758f7fa-08e7-4d05-8ed5-1f4573159292 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780542, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.137438} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2093.894166] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-0758f7fa-08e7-4d05-8ed5-1f4573159292 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2093.894477] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0758f7fa-08e7-4d05-8ed5-1f4573159292 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2093.894784] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-0758f7fa-08e7-4d05-8ed5-1f4573159292 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2093.895093] env[63379]: INFO nova.compute.manager [None req-0758f7fa-08e7-4d05-8ed5-1f4573159292 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Took 1.29 seconds to destroy the instance on the hypervisor. [ 2093.895473] env[63379]: DEBUG oslo.service.loopingcall [None req-0758f7fa-08e7-4d05-8ed5-1f4573159292 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2093.895741] env[63379]: DEBUG nova.compute.manager [-] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2093.895848] env[63379]: DEBUG nova.network.neutron [-] [instance: 18498244-3385-47dd-8810-b0cc731c3966] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2094.151865] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Creating linked-clone VM from snapshot {{(pid=63379) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2094.152312] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-a5a2dbbc-4012-4556-aa31-7536a8ef4895 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.163535] env[63379]: DEBUG oslo_vmware.api [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 2094.163535] env[63379]: value = "task-1780543" [ 2094.163535] env[63379]: _type = "Task" [ 2094.163535] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2094.174505] env[63379]: DEBUG oslo_vmware.api [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780543, 'name': CloneVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2094.247664] env[63379]: DEBUG nova.compute.manager [req-053409a0-f846-47ad-b46b-2b26a1b47ce2 req-26146b40-23b2-4acc-9dfe-c1036144c24a service nova] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Received event network-vif-deleted-6700d3b0-666a-4a4c-9ec7-7e59e38370b3 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2094.247947] env[63379]: INFO nova.compute.manager [req-053409a0-f846-47ad-b46b-2b26a1b47ce2 req-26146b40-23b2-4acc-9dfe-c1036144c24a service nova] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Neutron deleted interface 6700d3b0-666a-4a4c-9ec7-7e59e38370b3; detaching it from the instance and deleting it from the info cache [ 2094.248223] env[63379]: DEBUG nova.network.neutron [req-053409a0-f846-47ad-b46b-2b26a1b47ce2 req-26146b40-23b2-4acc-9dfe-c1036144c24a service nova] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2094.466689] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2094.466896] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Cleaning up deleted instances {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11274}} [ 2094.673882] env[63379]: DEBUG oslo_vmware.api [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780543, 'name': CloneVM_Task} progress is 94%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2094.727041] env[63379]: DEBUG nova.network.neutron [-] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2094.750989] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d8377702-82c5-4094-a68e-393910c8bd78 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.762447] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51cbd6ee-1fd6-483e-8008-bbce2690e69f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.788233] env[63379]: DEBUG nova.compute.manager [req-053409a0-f846-47ad-b46b-2b26a1b47ce2 req-26146b40-23b2-4acc-9dfe-c1036144c24a service nova] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Detach interface failed, port_id=6700d3b0-666a-4a4c-9ec7-7e59e38370b3, reason: Instance 18498244-3385-47dd-8810-b0cc731c3966 could not be found. 
{{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 2094.978423] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] There are 37 instances to clean {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11283}} [ 2094.978590] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: d7bd6d27-b949-42a8-b191-59a82e8624a6] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 2095.174602] env[63379]: DEBUG oslo_vmware.api [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780543, 'name': CloneVM_Task} progress is 100%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2095.233455] env[63379]: INFO nova.compute.manager [-] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Took 1.34 seconds to deallocate network for instance. [ 2095.481592] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 22296faa-10cf-48fe-a777-95d932987cf9] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 2095.675725] env[63379]: DEBUG oslo_vmware.api [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780543, 'name': CloneVM_Task, 'duration_secs': 1.071163} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2095.676044] env[63379]: INFO nova.virt.vmwareapi.vmops [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Created linked-clone VM from snapshot [ 2095.676755] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1be8849-8d84-46b9-841c-07f87e3b8261 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.684185] env[63379]: DEBUG nova.virt.vmwareapi.images [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Uploading image 128c5dc2-4347-46b9-bcea-aa973ef8f8d7 {{(pid=63379) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2095.731767] env[63379]: DEBUG oslo_vmware.rw_handles [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 2095.731767] env[63379]: value = "vm-369531" [ 2095.731767] env[63379]: _type = "VirtualMachine" [ 2095.731767] env[63379]: }. 
{{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 2095.732078] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-f31cc182-b57d-4db0-b76a-56755b99d538 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.739909] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0758f7fa-08e7-4d05-8ed5-1f4573159292 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2095.740163] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0758f7fa-08e7-4d05-8ed5-1f4573159292 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2095.740387] env[63379]: DEBUG nova.objects.instance [None req-0758f7fa-08e7-4d05-8ed5-1f4573159292 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lazy-loading 'resources' on Instance uuid 18498244-3385-47dd-8810-b0cc731c3966 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2095.742015] env[63379]: DEBUG oslo_vmware.rw_handles [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lease: (returnval){ [ 2095.742015] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f333bd-4aeb-75aa-8b95-7d1c7341501f" [ 2095.742015] env[63379]: _type = "HttpNfcLease" [ 2095.742015] env[63379]: } obtained for exporting VM: (result){ [ 2095.742015] env[63379]: value = "vm-369531" [ 2095.742015] env[63379]: _type = "VirtualMachine" [ 2095.742015] env[63379]: }. {{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 2095.742300] env[63379]: DEBUG oslo_vmware.api [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the lease: (returnval){ [ 2095.742300] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f333bd-4aeb-75aa-8b95-7d1c7341501f" [ 2095.742300] env[63379]: _type = "HttpNfcLease" [ 2095.742300] env[63379]: } to be ready. {{(pid=63379) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2095.748605] env[63379]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2095.748605] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f333bd-4aeb-75aa-8b95-7d1c7341501f" [ 2095.748605] env[63379]: _type = "HttpNfcLease" [ 2095.748605] env[63379]: } is initializing. 
{{(pid=63379) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2095.984503] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 20649b93-78ac-4805-aa24-5dbfef9d766b] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 2096.252543] env[63379]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2096.252543] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f333bd-4aeb-75aa-8b95-7d1c7341501f" [ 2096.252543] env[63379]: _type = "HttpNfcLease" [ 2096.252543] env[63379]: } is ready. {{(pid=63379) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2096.252986] env[63379]: DEBUG oslo_vmware.rw_handles [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2096.252986] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f333bd-4aeb-75aa-8b95-7d1c7341501f" [ 2096.252986] env[63379]: _type = "HttpNfcLease" [ 2096.252986] env[63379]: }. {{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 2096.253646] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e261d122-9f23-41bd-a14b-8d652e390bc8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.261741] env[63379]: DEBUG oslo_vmware.rw_handles [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520281b4-04bc-791b-621a-c8775705cb5f/disk-0.vmdk from lease info. {{(pid=63379) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2096.261987] env[63379]: DEBUG oslo_vmware.rw_handles [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520281b4-04bc-791b-621a-c8775705cb5f/disk-0.vmdk for reading. 
{{(pid=63379) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 2096.353540] env[63379]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-1046588d-b960-4b2b-90ab-403f16dd628a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.362144] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5096a33a-6080-4b36-bf96-072ead6355f9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.370715] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6450f64-d801-4051-aa52-10e51f0134a0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.406996] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62e584f6-663c-4cd1-87b7-d284cb3e01fc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.414143] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-338ad4aa-474e-4cc7-be3c-a0d1715c1806 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.426851] env[63379]: DEBUG nova.compute.provider_tree [None req-0758f7fa-08e7-4d05-8ed5-1f4573159292 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2096.488178] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 0b06665f-befc-4fa3-9eef-2c2f74ba382f] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 2096.930457] env[63379]: DEBUG nova.scheduler.client.report [None req-0758f7fa-08e7-4d05-8ed5-1f4573159292 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2096.991261] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 8b9f070e-11d3-4e2d-a0ce-54bb939a36ff] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 2097.435241] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0758f7fa-08e7-4d05-8ed5-1f4573159292 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.695s {{(pid=63379) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2097.476334] env[63379]: INFO nova.scheduler.client.report [None req-0758f7fa-08e7-4d05-8ed5-1f4573159292 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Deleted allocations for instance 18498244-3385-47dd-8810-b0cc731c3966 [ 2097.495192] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: a39c5511-3efc-41e9-8902-692f237557e1] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 2097.984615] env[63379]: DEBUG oslo_concurrency.lockutils [None req-0758f7fa-08e7-4d05-8ed5-1f4573159292 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "18498244-3385-47dd-8810-b0cc731c3966" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.385s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2097.998663] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 2d84b7a4-5e38-45cc-89d5-fd3c6f6bc576] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 2098.501942] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: e1681d89-2f55-47b7-9962-55aa169b3d0a] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 2098.871228] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "d9640bf2-0f88-4c0e-9e21-7ee00ee8800f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2098.871423] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "d9640bf2-0f88-4c0e-9e21-7ee00ee8800f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2099.005182] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 10fc842d-b821-4103-b6a5-f5b2fc46ea74] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 2099.374570] env[63379]: DEBUG nova.compute.manager [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Starting instance... 
{{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2099.508410] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: df8d513d-c201-4ffe-894e-cf8c3318cecc] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 2099.908686] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2099.908972] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2099.910604] env[63379]: INFO nova.compute.claims [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2100.011761] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 1c983c16-6f86-4932-9698-7fb1428ca231] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 2100.515176] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: d4988643-18ff-44c8-8363-e0de43da2abe] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 2100.968955] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52f38375-fd0f-4b20-a1f5-70c2bb3a4c8a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2100.977353] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c86198f0-965d-45c5-867c-664e032985e2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.008286] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64fc07d0-61d7-4189-931e-8dbdf59c44c6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.016102] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eae72097-9150-4383-96ca-ea5e43acbe07 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.020204] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 14adcb7b-b754-407e-9a99-28a1ca2ede68] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 2101.031556] env[63379]: DEBUG nova.compute.provider_tree [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 
tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2101.523908] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: b3f753e3-2ec6-4359-8de0-f9c771e274e5] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 2101.534242] env[63379]: DEBUG nova.scheduler.client.report [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2102.027200] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 1f37b5c4-fbbf-4391-b9a2-537bdfbd33e6] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 2102.038743] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.130s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2102.039297] env[63379]: DEBUG nova.compute.manager [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2102.530900] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 48c17c3b-1197-46cb-a0f7-3671b2d82c7e] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 2102.543703] env[63379]: DEBUG nova.compute.utils [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2102.545040] env[63379]: DEBUG nova.compute.manager [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2102.545233] env[63379]: DEBUG nova.network.neutron [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2102.587208] env[63379]: DEBUG nova.policy [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f2e7c2125f0044508dc4016c4de224e2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9746ae945355479fa5880802e08d2b0a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 2102.918848] env[63379]: DEBUG nova.network.neutron [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Successfully created port: 89e016a5-2e49-4917-a716-ac799ceeda69 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2103.034656] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: d3c05ba6-b565-4432-b815-14ae0933853e] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 2103.048421] env[63379]: DEBUG nova.compute.manager [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2103.465458] env[63379]: DEBUG oslo_vmware.rw_handles [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520281b4-04bc-791b-621a-c8775705cb5f/disk-0.vmdk. {{(pid=63379) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2103.466415] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b51858c2-1963-473d-8205-e1d578470257 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.472790] env[63379]: DEBUG oslo_vmware.rw_handles [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520281b4-04bc-791b-621a-c8775705cb5f/disk-0.vmdk is in state: ready. 
{{(pid=63379) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2103.472957] env[63379]: ERROR oslo_vmware.rw_handles [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520281b4-04bc-791b-621a-c8775705cb5f/disk-0.vmdk due to incomplete transfer. [ 2103.473211] env[63379]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-509b237e-0af1-406c-8075-ae6f5f7cf9b2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.480624] env[63379]: DEBUG oslo_vmware.rw_handles [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520281b4-04bc-791b-621a-c8775705cb5f/disk-0.vmdk. {{(pid=63379) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2103.480826] env[63379]: DEBUG nova.virt.vmwareapi.images [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Uploaded image 128c5dc2-4347-46b9-bcea-aa973ef8f8d7 to the Glance image server {{(pid=63379) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2103.483230] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Destroying the VM {{(pid=63379) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2103.483478] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-2bc352ca-1bdb-4e64-9719-1bd567923be2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.489536] env[63379]: DEBUG oslo_vmware.api [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 2103.489536] env[63379]: value = "task-1780545" [ 2103.489536] env[63379]: _type = "Task" [ 2103.489536] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2103.499044] env[63379]: DEBUG oslo_vmware.api [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780545, 'name': Destroy_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2103.538803] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 5e7a8635-8345-41c3-b485-a89773f37c5e] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 2103.999987] env[63379]: DEBUG oslo_vmware.api [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780545, 'name': Destroy_Task, 'duration_secs': 0.302243} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2104.000286] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Destroyed the VM [ 2104.000535] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Deleting Snapshot of the VM instance {{(pid=63379) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2104.000785] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-feb9f0e5-1b2f-4a78-88b4-bb0617e9963c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.007146] env[63379]: DEBUG oslo_vmware.api [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 2104.007146] env[63379]: value = "task-1780546" [ 2104.007146] env[63379]: _type = "Task" [ 2104.007146] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2104.014760] env[63379]: DEBUG oslo_vmware.api [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780546, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2104.042274] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 88dae632-b363-4187-9198-e4300783d420] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 2104.057661] env[63379]: DEBUG nova.compute.manager [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2104.082508] env[63379]: DEBUG nova.virt.hardware [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2104.082744] env[63379]: DEBUG nova.virt.hardware [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2104.082908] env[63379]: DEBUG nova.virt.hardware [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2104.083109] env[63379]: DEBUG nova.virt.hardware [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2104.083264] env[63379]: DEBUG nova.virt.hardware [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2104.083415] env[63379]: DEBUG nova.virt.hardware [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2104.083666] env[63379]: DEBUG nova.virt.hardware [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2104.083845] env[63379]: DEBUG nova.virt.hardware [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2104.084031] env[63379]: DEBUG nova.virt.hardware [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 
tempest-ServersTestJSON-1933653091-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2104.084204] env[63379]: DEBUG nova.virt.hardware [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2104.084382] env[63379]: DEBUG nova.virt.hardware [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2104.085229] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be8fcfcd-9ad2-42cc-a3a1-da00bbbf5d63 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.093387] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c55ed887-ea5e-485e-8f2e-59de0b548ad3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.403708] env[63379]: DEBUG nova.compute.manager [req-cac32e84-a98f-4c19-8eb2-dfe47f0fb986 req-d53e5347-b264-4f37-8f9b-2ef17fb29778 service nova] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Received event network-vif-plugged-89e016a5-2e49-4917-a716-ac799ceeda69 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2104.403995] env[63379]: DEBUG oslo_concurrency.lockutils [req-cac32e84-a98f-4c19-8eb2-dfe47f0fb986 req-d53e5347-b264-4f37-8f9b-2ef17fb29778 service nova] Acquiring lock "d9640bf2-0f88-4c0e-9e21-7ee00ee8800f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2104.404184] env[63379]: DEBUG oslo_concurrency.lockutils [req-cac32e84-a98f-4c19-8eb2-dfe47f0fb986 req-d53e5347-b264-4f37-8f9b-2ef17fb29778 service nova] Lock "d9640bf2-0f88-4c0e-9e21-7ee00ee8800f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2104.404355] env[63379]: DEBUG oslo_concurrency.lockutils [req-cac32e84-a98f-4c19-8eb2-dfe47f0fb986 req-d53e5347-b264-4f37-8f9b-2ef17fb29778 service nova] Lock "d9640bf2-0f88-4c0e-9e21-7ee00ee8800f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2104.404541] env[63379]: DEBUG nova.compute.manager [req-cac32e84-a98f-4c19-8eb2-dfe47f0fb986 req-d53e5347-b264-4f37-8f9b-2ef17fb29778 service nova] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] No waiting events found dispatching network-vif-plugged-89e016a5-2e49-4917-a716-ac799ceeda69 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2104.404738] env[63379]: WARNING nova.compute.manager [req-cac32e84-a98f-4c19-8eb2-dfe47f0fb986 req-d53e5347-b264-4f37-8f9b-2ef17fb29778 service nova] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] 
Received unexpected event network-vif-plugged-89e016a5-2e49-4917-a716-ac799ceeda69 for instance with vm_state building and task_state spawning. [ 2104.489258] env[63379]: DEBUG nova.network.neutron [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Successfully updated port: 89e016a5-2e49-4917-a716-ac799ceeda69 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2104.516676] env[63379]: DEBUG oslo_vmware.api [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780546, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2104.545100] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 815d0af5-e9a8-4475-9414-42715ea32d6a] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 2104.992156] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "refresh_cache-d9640bf2-0f88-4c0e-9e21-7ee00ee8800f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2104.992314] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquired lock "refresh_cache-d9640bf2-0f88-4c0e-9e21-7ee00ee8800f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2104.992468] env[63379]: DEBUG nova.network.neutron [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2105.017219] env[63379]: DEBUG oslo_vmware.api [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780546, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2105.048174] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 2be6bdea-416e-4912-8930-3c4e4f194f99] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 2105.517521] env[63379]: DEBUG oslo_vmware.api [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780546, 'name': RemoveSnapshot_Task, 'duration_secs': 1.080769} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2105.517873] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Deleted Snapshot of the VM instance {{(pid=63379) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2105.518213] env[63379]: DEBUG nova.compute.manager [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2105.519026] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-845d32c7-2527-46a0-9b6f-cca2aef47789 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.523390] env[63379]: DEBUG nova.network.neutron [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2105.551492] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: f99bad46-931d-497a-8586-b140309b0b45] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 2105.652942] env[63379]: DEBUG nova.network.neutron [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Updating instance_info_cache with network_info: [{"id": "89e016a5-2e49-4917-a716-ac799ceeda69", "address": "fa:16:3e:67:aa:1c", "network": {"id": "13b14fc1-6384-47ab-b623-f48d1ef0c41e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1646386679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9746ae945355479fa5880802e08d2b0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16c6ea68-9b0e-4ac0-a484-7a9a40533017", "external-id": "nsx-vlan-transportzone-384", "segmentation_id": 384, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89e016a5-2e", "ovs_interfaceid": "89e016a5-2e49-4917-a716-ac799ceeda69", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2106.032599] env[63379]: INFO nova.compute.manager [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Shelve 
offloading [ 2106.034123] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2106.034378] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9f3020fd-6ea2-4b59-b983-f05a6a07e1af {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.041942] env[63379]: DEBUG oslo_vmware.api [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 2106.041942] env[63379]: value = "task-1780547" [ 2106.041942] env[63379]: _type = "Task" [ 2106.041942] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2106.052337] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] VM already powered off {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2106.052518] env[63379]: DEBUG nova.compute.manager [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2106.053243] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dbba691-0cfe-43db-8713-18b679807030 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.055580] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 62494fa1-5990-490d-92ae-00607d7ebba1] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 2106.060129] env[63379]: DEBUG oslo_concurrency.lockutils [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "refresh_cache-7f0c426b-1ce3-469f-8ee1-6dd2178f014e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2106.060299] env[63379]: DEBUG oslo_concurrency.lockutils [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquired lock "refresh_cache-7f0c426b-1ce3-469f-8ee1-6dd2178f014e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2106.060480] env[63379]: DEBUG nova.network.neutron [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2106.155876] env[63379]: 
DEBUG oslo_concurrency.lockutils [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Releasing lock "refresh_cache-d9640bf2-0f88-4c0e-9e21-7ee00ee8800f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2106.156190] env[63379]: DEBUG nova.compute.manager [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Instance network_info: |[{"id": "89e016a5-2e49-4917-a716-ac799ceeda69", "address": "fa:16:3e:67:aa:1c", "network": {"id": "13b14fc1-6384-47ab-b623-f48d1ef0c41e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1646386679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9746ae945355479fa5880802e08d2b0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16c6ea68-9b0e-4ac0-a484-7a9a40533017", "external-id": "nsx-vlan-transportzone-384", "segmentation_id": 384, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89e016a5-2e", "ovs_interfaceid": "89e016a5-2e49-4917-a716-ac799ceeda69", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2106.156625] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:67:aa:1c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '16c6ea68-9b0e-4ac0-a484-7a9a40533017', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '89e016a5-2e49-4917-a716-ac799ceeda69', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2106.164079] env[63379]: DEBUG oslo.service.loopingcall [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2106.164288] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2106.164509] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-08f03b34-cddc-4d4f-8d40-0486e8bd9393 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.184808] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2106.184808] env[63379]: value = "task-1780548" [ 2106.184808] env[63379]: _type = "Task" [ 2106.184808] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2106.192286] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780548, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2106.430537] env[63379]: DEBUG nova.compute.manager [req-09178be7-39c4-4846-bec7-c948f430aacc req-51c5f326-9aef-4ff8-ade0-526ea39d448f service nova] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Received event network-changed-89e016a5-2e49-4917-a716-ac799ceeda69 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2106.430655] env[63379]: DEBUG nova.compute.manager [req-09178be7-39c4-4846-bec7-c948f430aacc req-51c5f326-9aef-4ff8-ade0-526ea39d448f service nova] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Refreshing instance network info cache due to event network-changed-89e016a5-2e49-4917-a716-ac799ceeda69. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 2106.430870] env[63379]: DEBUG oslo_concurrency.lockutils [req-09178be7-39c4-4846-bec7-c948f430aacc req-51c5f326-9aef-4ff8-ade0-526ea39d448f service nova] Acquiring lock "refresh_cache-d9640bf2-0f88-4c0e-9e21-7ee00ee8800f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2106.431030] env[63379]: DEBUG oslo_concurrency.lockutils [req-09178be7-39c4-4846-bec7-c948f430aacc req-51c5f326-9aef-4ff8-ade0-526ea39d448f service nova] Acquired lock "refresh_cache-d9640bf2-0f88-4c0e-9e21-7ee00ee8800f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2106.431207] env[63379]: DEBUG nova.network.neutron [req-09178be7-39c4-4846-bec7-c948f430aacc req-51c5f326-9aef-4ff8-ade0-526ea39d448f service nova] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Refreshing network info cache for port 89e016a5-2e49-4917-a716-ac799ceeda69 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2106.557991] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 9faef8ba-2263-4af8-ba5b-13a17b4275b6] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 2106.694141] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780548, 'name': CreateVM_Task, 'duration_secs': 0.319779} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2106.694390] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2106.695015] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2106.695272] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2106.695517] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2106.695766] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-efc50a5e-80dc-412d-93cb-cbd0608b8068 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.700053] env[63379]: DEBUG oslo_vmware.api [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2106.700053] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5254d233-24de-a655-bbba-176abedfed6d" [ 2106.700053] env[63379]: _type = "Task" [ 2106.700053] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2106.707120] env[63379]: DEBUG oslo_vmware.api [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5254d233-24de-a655-bbba-176abedfed6d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2106.811792] env[63379]: DEBUG nova.network.neutron [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Updating instance_info_cache with network_info: [{"id": "f4822da6-3551-4e0b-937f-55536f9c7342", "address": "fa:16:3e:69:f3:f8", "network": {"id": "0dd98be0-5b25-4e45-ac38-4b8d3cd9fc6c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-191573180-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "645f0e0a5e1a44d59ca9c85da49bb454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4822da6-35", "ovs_interfaceid": "f4822da6-3551-4e0b-937f-55536f9c7342", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2107.061700] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 8078bac6-146a-4e3a-a7a7-7093f617a330] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 2107.121469] env[63379]: DEBUG nova.network.neutron [req-09178be7-39c4-4846-bec7-c948f430aacc req-51c5f326-9aef-4ff8-ade0-526ea39d448f service nova] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Updated VIF entry in instance network info cache for port 89e016a5-2e49-4917-a716-ac799ceeda69. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2107.121857] env[63379]: DEBUG nova.network.neutron [req-09178be7-39c4-4846-bec7-c948f430aacc req-51c5f326-9aef-4ff8-ade0-526ea39d448f service nova] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Updating instance_info_cache with network_info: [{"id": "89e016a5-2e49-4917-a716-ac799ceeda69", "address": "fa:16:3e:67:aa:1c", "network": {"id": "13b14fc1-6384-47ab-b623-f48d1ef0c41e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1646386679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9746ae945355479fa5880802e08d2b0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16c6ea68-9b0e-4ac0-a484-7a9a40533017", "external-id": "nsx-vlan-transportzone-384", "segmentation_id": 384, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89e016a5-2e", "ovs_interfaceid": "89e016a5-2e49-4917-a716-ac799ceeda69", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2107.211084] env[63379]: DEBUG oslo_vmware.api [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5254d233-24de-a655-bbba-176abedfed6d, 'name': SearchDatastore_Task, 'duration_secs': 0.009375} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2107.211345] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2107.211587] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2107.211830] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2107.211973] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2107.212170] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2107.212427] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c82ecc8d-9f6f-4db7-812f-36c95f7bcfc6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.220171] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2107.220350] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2107.221021] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd5baa19-2c6b-41f4-b534-7d2bf68e72ab {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.225555] env[63379]: DEBUG oslo_vmware.api [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2107.225555] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5211b1e8-021b-30ad-6e21-ca848f36f15e" [ 2107.225555] env[63379]: _type = "Task" [ 2107.225555] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2107.233287] env[63379]: DEBUG oslo_vmware.api [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5211b1e8-021b-30ad-6e21-ca848f36f15e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2107.315058] env[63379]: DEBUG oslo_concurrency.lockutils [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Releasing lock "refresh_cache-7f0c426b-1ce3-469f-8ee1-6dd2178f014e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2107.565960] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: cb62192b-63db-40d0-97bb-1df171ade64b] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 2107.624472] env[63379]: DEBUG oslo_concurrency.lockutils [req-09178be7-39c4-4846-bec7-c948f430aacc req-51c5f326-9aef-4ff8-ade0-526ea39d448f service nova] Releasing lock "refresh_cache-d9640bf2-0f88-4c0e-9e21-7ee00ee8800f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2107.648448] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2107.649364] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7a60aa5-e457-42a8-be8d-346cea3b8cc0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.657304] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2107.657538] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1825c169-077a-4d22-ae4f-e083e3402cc0 {{(pid=63379) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.736323] env[63379]: DEBUG oslo_vmware.api [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5211b1e8-021b-30ad-6e21-ca848f36f15e, 'name': SearchDatastore_Task, 'duration_secs': 0.008168} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2107.736583] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ce92dcc-6406-4d27-bacc-d8b64068699b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.742431] env[63379]: DEBUG oslo_vmware.api [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2107.742431] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52494e4b-912d-4d3a-5ff4-ca087b1d9b5a" [ 2107.742431] env[63379]: _type = "Task" [ 2107.742431] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2107.750021] env[63379]: DEBUG oslo_vmware.api [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52494e4b-912d-4d3a-5ff4-ca087b1d9b5a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2107.756263] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2107.756471] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2107.756653] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Deleting the datastore file [datastore1] 7f0c426b-1ce3-469f-8ee1-6dd2178f014e {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2107.756895] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bf232cce-e41c-4575-bc9c-49db91fb98e9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.762712] env[63379]: DEBUG oslo_vmware.api [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 2107.762712] env[63379]: value = "task-1780550" [ 2107.762712] env[63379]: _type = "Task" [ 2107.762712] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2107.769810] env[63379]: DEBUG oslo_vmware.api [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780550, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2108.069812] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 9040201c-e1de-47d9-b9c2-b30c14e32749] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 2108.252800] env[63379]: DEBUG oslo_vmware.api [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52494e4b-912d-4d3a-5ff4-ca087b1d9b5a, 'name': SearchDatastore_Task, 'duration_secs': 0.010202} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2108.253092] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2108.253349] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] d9640bf2-0f88-4c0e-9e21-7ee00ee8800f/d9640bf2-0f88-4c0e-9e21-7ee00ee8800f.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2108.253599] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b569ae07-959c-49cf-88ad-d336f95a95de {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2108.260080] env[63379]: DEBUG oslo_vmware.api [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2108.260080] env[63379]: value = "task-1780551" [ 2108.260080] env[63379]: _type = "Task" [ 2108.260080] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2108.268181] env[63379]: DEBUG oslo_vmware.api [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780551, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2108.272515] env[63379]: DEBUG oslo_vmware.api [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780550, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139226} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2108.272743] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2108.272936] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2108.273125] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2108.297493] env[63379]: INFO nova.scheduler.client.report [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Deleted allocations for instance 7f0c426b-1ce3-469f-8ee1-6dd2178f014e [ 2108.459818] env[63379]: DEBUG nova.compute.manager [req-f937d581-a7a3-4b88-8efe-dfe60cb2fac7 req-4c94a40c-f50a-4f94-8491-5f0fc293bf1f service nova] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Received event network-vif-unplugged-f4822da6-3551-4e0b-937f-55536f9c7342 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2108.460061] env[63379]: DEBUG oslo_concurrency.lockutils [req-f937d581-a7a3-4b88-8efe-dfe60cb2fac7 req-4c94a40c-f50a-4f94-8491-5f0fc293bf1f service nova] Acquiring lock "7f0c426b-1ce3-469f-8ee1-6dd2178f014e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2108.460286] env[63379]: DEBUG oslo_concurrency.lockutils [req-f937d581-a7a3-4b88-8efe-dfe60cb2fac7 req-4c94a40c-f50a-4f94-8491-5f0fc293bf1f service nova] Lock "7f0c426b-1ce3-469f-8ee1-6dd2178f014e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2108.460478] env[63379]: DEBUG oslo_concurrency.lockutils [req-f937d581-a7a3-4b88-8efe-dfe60cb2fac7 req-4c94a40c-f50a-4f94-8491-5f0fc293bf1f service nova] Lock "7f0c426b-1ce3-469f-8ee1-6dd2178f014e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2108.460659] env[63379]: DEBUG nova.compute.manager [req-f937d581-a7a3-4b88-8efe-dfe60cb2fac7 
req-4c94a40c-f50a-4f94-8491-5f0fc293bf1f service nova] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] No waiting events found dispatching network-vif-unplugged-f4822da6-3551-4e0b-937f-55536f9c7342 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2108.460831] env[63379]: WARNING nova.compute.manager [req-f937d581-a7a3-4b88-8efe-dfe60cb2fac7 req-4c94a40c-f50a-4f94-8491-5f0fc293bf1f service nova] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Received unexpected event network-vif-unplugged-f4822da6-3551-4e0b-937f-55536f9c7342 for instance with vm_state shelved_offloaded and task_state None. [ 2108.460991] env[63379]: DEBUG nova.compute.manager [req-f937d581-a7a3-4b88-8efe-dfe60cb2fac7 req-4c94a40c-f50a-4f94-8491-5f0fc293bf1f service nova] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Received event network-changed-f4822da6-3551-4e0b-937f-55536f9c7342 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2108.461159] env[63379]: DEBUG nova.compute.manager [req-f937d581-a7a3-4b88-8efe-dfe60cb2fac7 req-4c94a40c-f50a-4f94-8491-5f0fc293bf1f service nova] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Refreshing instance network info cache due to event network-changed-f4822da6-3551-4e0b-937f-55536f9c7342. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 2108.461340] env[63379]: DEBUG oslo_concurrency.lockutils [req-f937d581-a7a3-4b88-8efe-dfe60cb2fac7 req-4c94a40c-f50a-4f94-8491-5f0fc293bf1f service nova] Acquiring lock "refresh_cache-7f0c426b-1ce3-469f-8ee1-6dd2178f014e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2108.461476] env[63379]: DEBUG oslo_concurrency.lockutils [req-f937d581-a7a3-4b88-8efe-dfe60cb2fac7 req-4c94a40c-f50a-4f94-8491-5f0fc293bf1f service nova] Acquired lock "refresh_cache-7f0c426b-1ce3-469f-8ee1-6dd2178f014e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2108.461636] env[63379]: DEBUG nova.network.neutron [req-f937d581-a7a3-4b88-8efe-dfe60cb2fac7 req-4c94a40c-f50a-4f94-8491-5f0fc293bf1f service nova] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Refreshing network info cache for port f4822da6-3551-4e0b-937f-55536f9c7342 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2108.573661] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 4b419aa8-d4da-45fd-a6da-6f05ee851f2f] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 2108.771014] env[63379]: DEBUG oslo_vmware.api [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780551, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2108.802494] env[63379]: DEBUG oslo_concurrency.lockutils [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2108.802792] env[63379]: DEBUG oslo_concurrency.lockutils [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2108.803029] env[63379]: DEBUG nova.objects.instance [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lazy-loading 'resources' on Instance uuid 7f0c426b-1ce3-469f-8ee1-6dd2178f014e {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2109.076847] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: c900bb90-b4a8-40a2-9436-5a0ced1dd919] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 2109.270791] env[63379]: DEBUG oslo_vmware.api [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780551, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.946217} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2109.271775] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] d9640bf2-0f88-4c0e-9e21-7ee00ee8800f/d9640bf2-0f88-4c0e-9e21-7ee00ee8800f.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2109.272065] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2109.272477] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2baa9856-1b84-4dbc-8370-fb6e9a677818 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.277690] env[63379]: DEBUG nova.network.neutron [req-f937d581-a7a3-4b88-8efe-dfe60cb2fac7 req-4c94a40c-f50a-4f94-8491-5f0fc293bf1f service nova] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Updated VIF entry in instance network info cache for port f4822da6-3551-4e0b-937f-55536f9c7342. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2109.278029] env[63379]: DEBUG nova.network.neutron [req-f937d581-a7a3-4b88-8efe-dfe60cb2fac7 req-4c94a40c-f50a-4f94-8491-5f0fc293bf1f service nova] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Updating instance_info_cache with network_info: [{"id": "f4822da6-3551-4e0b-937f-55536f9c7342", "address": "fa:16:3e:69:f3:f8", "network": {"id": "0dd98be0-5b25-4e45-ac38-4b8d3cd9fc6c", "bridge": null, "label": "tempest-ServerActionsTestOtherB-191573180-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "645f0e0a5e1a44d59ca9c85da49bb454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapf4822da6-35", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2109.280503] env[63379]: DEBUG oslo_vmware.api [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2109.280503] env[63379]: value = "task-1780552" [ 2109.280503] env[63379]: _type = "Task" [ 2109.280503] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2109.289376] env[63379]: DEBUG oslo_vmware.api [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780552, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2109.305414] env[63379]: DEBUG nova.objects.instance [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lazy-loading 'numa_topology' on Instance uuid 7f0c426b-1ce3-469f-8ee1-6dd2178f014e {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2109.580329] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 19941838-d6b0-4fb8-9d06-f4a1b80ba428] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 2109.785709] env[63379]: DEBUG oslo_concurrency.lockutils [req-f937d581-a7a3-4b88-8efe-dfe60cb2fac7 req-4c94a40c-f50a-4f94-8491-5f0fc293bf1f service nova] Releasing lock "refresh_cache-7f0c426b-1ce3-469f-8ee1-6dd2178f014e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2109.791755] env[63379]: DEBUG oslo_vmware.api [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780552, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063078} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2109.792043] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2109.792800] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1b3a101-b8b0-4722-8001-5bb020a742b3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.814022] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] d9640bf2-0f88-4c0e-9e21-7ee00ee8800f/d9640bf2-0f88-4c0e-9e21-7ee00ee8800f.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2109.815579] env[63379]: DEBUG nova.objects.base [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Object Instance<7f0c426b-1ce3-469f-8ee1-6dd2178f014e> lazy-loaded attributes: resources,numa_topology {{(pid=63379) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2109.817875] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-04290036-27c5-48e0-9b11-19f896fc0e4f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.840291] env[63379]: DEBUG oslo_vmware.api [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2109.840291] env[63379]: value = "task-1780553" [ 2109.840291] env[63379]: _type = "Task" [ 2109.840291] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2109.852891] env[63379]: DEBUG oslo_vmware.api [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780553, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2109.879688] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca07c2d7-140b-45e3-87c8-716ce4429868 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.887101] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f97a487a-88db-4450-bc6f-acacd73d7cab {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.916885] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b657b78a-8af9-4a0c-8def-3f01f359a584 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.923924] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4123a3d5-184f-49e4-a551-82d15a735030 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.937553] env[63379]: DEBUG nova.compute.provider_tree [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2110.083653] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: a7cce485-7476-4ea1-b127-68d879e164cd] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 2110.336028] env[63379]: DEBUG oslo_concurrency.lockutils [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "7f0c426b-1ce3-469f-8ee1-6dd2178f014e" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2110.349983] env[63379]: DEBUG oslo_vmware.api [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780553, 'name': ReconfigVM_Task, 'duration_secs': 0.291802} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2110.351172] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Reconfigured VM instance instance-00000079 to attach disk [datastore1] d9640bf2-0f88-4c0e-9e21-7ee00ee8800f/d9640bf2-0f88-4c0e-9e21-7ee00ee8800f.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2110.351172] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-730e9487-95fa-4752-9ab0-d2e25b164560 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.356639] env[63379]: DEBUG oslo_vmware.api [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2110.356639] env[63379]: value = "task-1780554" [ 2110.356639] env[63379]: _type = "Task" [ 2110.356639] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2110.363969] env[63379]: DEBUG oslo_vmware.api [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780554, 'name': Rename_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2110.440960] env[63379]: DEBUG nova.scheduler.client.report [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2110.588353] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 510db409-0b4c-494a-8084-39ef3cd6c918] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 2110.866744] env[63379]: DEBUG oslo_vmware.api [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780554, 'name': Rename_Task, 'duration_secs': 0.15167} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2110.866964] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2110.867311] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bfc07216-d43e-40cd-b9c1-966e163e5d18 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.873946] env[63379]: DEBUG oslo_vmware.api [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2110.873946] env[63379]: value = "task-1780555" [ 2110.873946] env[63379]: _type = "Task" [ 2110.873946] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2110.881243] env[63379]: DEBUG oslo_vmware.api [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780555, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2110.946337] env[63379]: DEBUG oslo_concurrency.lockutils [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.143s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2111.091663] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 19a41941-0679-4971-8a44-c95b13f5c294] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 2111.383604] env[63379]: DEBUG oslo_vmware.api [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780555, 'name': PowerOnVM_Task, 'duration_secs': 0.449875} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2111.385137] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2111.385137] env[63379]: INFO nova.compute.manager [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Took 7.33 seconds to spawn the instance on the hypervisor. 
[ 2111.385137] env[63379]: DEBUG nova.compute.manager [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2111.385280] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e96aa73-9fe4-4fc9-a15d-f04b1c4c1ec6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.454628] env[63379]: DEBUG oslo_concurrency.lockutils [None req-37586a41-d9c8-4574-836d-c514dada50fa tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "7f0c426b-1ce3-469f-8ee1-6dd2178f014e" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 20.407s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2111.455482] env[63379]: DEBUG oslo_concurrency.lockutils [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "7f0c426b-1ce3-469f-8ee1-6dd2178f014e" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 1.120s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2111.455669] env[63379]: INFO nova.compute.manager [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Unshelving [ 2111.595196] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 1d76a28f-822d-4b4f-be2f-2ad3371b3979] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 2111.903777] env[63379]: INFO nova.compute.manager [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Took 12.02 seconds to build instance. 
[ 2112.099053] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 3b662a31-76b9-4ac8-a6bd-bc4983f7fec9] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 2112.405252] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f3b135e4-20a7-49de-9b40-a8054f9b5e56 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "d9640bf2-0f88-4c0e-9e21-7ee00ee8800f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.534s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2112.476712] env[63379]: DEBUG oslo_concurrency.lockutils [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2112.477075] env[63379]: DEBUG oslo_concurrency.lockutils [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2112.477276] env[63379]: DEBUG nova.objects.instance [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lazy-loading 'pci_requests' on Instance uuid 7f0c426b-1ce3-469f-8ee1-6dd2178f014e {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2112.601402] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 1d2de9da-9dfe-42d2-b206-bb5139b1970b] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 2112.980880] env[63379]: DEBUG nova.objects.instance [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lazy-loading 'numa_topology' on Instance uuid 7f0c426b-1ce3-469f-8ee1-6dd2178f014e {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2113.104706] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 90f0c97d-695b-4975-8ab9-4e77a9175da1] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 2113.313639] env[63379]: DEBUG oslo_concurrency.lockutils [None req-db621b10-b7ca-4db0-b533-9db2b7d86a9b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "d9640bf2-0f88-4c0e-9e21-7ee00ee8800f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2113.313934] env[63379]: DEBUG oslo_concurrency.lockutils [None req-db621b10-b7ca-4db0-b533-9db2b7d86a9b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock 
"d9640bf2-0f88-4c0e-9e21-7ee00ee8800f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2113.314619] env[63379]: DEBUG oslo_concurrency.lockutils [None req-db621b10-b7ca-4db0-b533-9db2b7d86a9b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "d9640bf2-0f88-4c0e-9e21-7ee00ee8800f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2113.314834] env[63379]: DEBUG oslo_concurrency.lockutils [None req-db621b10-b7ca-4db0-b533-9db2b7d86a9b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "d9640bf2-0f88-4c0e-9e21-7ee00ee8800f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2113.315096] env[63379]: DEBUG oslo_concurrency.lockutils [None req-db621b10-b7ca-4db0-b533-9db2b7d86a9b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "d9640bf2-0f88-4c0e-9e21-7ee00ee8800f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2113.317272] env[63379]: INFO nova.compute.manager [None req-db621b10-b7ca-4db0-b533-9db2b7d86a9b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Terminating instance [ 2113.319118] env[63379]: DEBUG nova.compute.manager [None req-db621b10-b7ca-4db0-b533-9db2b7d86a9b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2113.319324] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-db621b10-b7ca-4db0-b533-9db2b7d86a9b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2113.320238] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f71fe19a-0e7e-45fa-bb7d-8e7405af742b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.327839] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-db621b10-b7ca-4db0-b533-9db2b7d86a9b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2113.328342] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2b95cf23-348c-49bb-bbf3-b57408e81e13 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.334688] env[63379]: DEBUG oslo_vmware.api [None req-db621b10-b7ca-4db0-b533-9db2b7d86a9b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2113.334688] env[63379]: value = "task-1780556" [ 2113.334688] env[63379]: _type = "Task" [ 2113.334688] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2113.342189] env[63379]: DEBUG oslo_vmware.api [None req-db621b10-b7ca-4db0-b533-9db2b7d86a9b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780556, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2113.483848] env[63379]: INFO nova.compute.claims [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2113.607965] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2113.608491] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Cleaning up deleted instances with incomplete migration {{(pid=63379) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11312}} [ 2113.844940] env[63379]: DEBUG oslo_vmware.api [None req-db621b10-b7ca-4db0-b533-9db2b7d86a9b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780556, 'name': PowerOffVM_Task, 'duration_secs': 0.166307} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2113.845238] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-db621b10-b7ca-4db0-b533-9db2b7d86a9b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2113.845411] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-db621b10-b7ca-4db0-b533-9db2b7d86a9b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2113.845661] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-98ae8301-ed8d-4344-9494-61c680b13915 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.957788] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-db621b10-b7ca-4db0-b533-9db2b7d86a9b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2113.958054] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-db621b10-b7ca-4db0-b533-9db2b7d86a9b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2113.958221] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-db621b10-b7ca-4db0-b533-9db2b7d86a9b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Deleting the datastore file [datastore1] d9640bf2-0f88-4c0e-9e21-7ee00ee8800f {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2113.958489] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c8fb77eb-beef-4af0-9554-a602fa569cfd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.964232] env[63379]: DEBUG oslo_vmware.api [None req-db621b10-b7ca-4db0-b533-9db2b7d86a9b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2113.964232] env[63379]: value = "task-1780558" [ 2113.964232] env[63379]: _type = "Task" [ 2113.964232] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2113.972505] env[63379]: DEBUG oslo_vmware.api [None req-db621b10-b7ca-4db0-b533-9db2b7d86a9b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780558, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2114.474083] env[63379]: DEBUG oslo_vmware.api [None req-db621b10-b7ca-4db0-b533-9db2b7d86a9b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780558, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167958} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2114.474344] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-db621b10-b7ca-4db0-b533-9db2b7d86a9b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2114.474542] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-db621b10-b7ca-4db0-b533-9db2b7d86a9b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2114.474771] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-db621b10-b7ca-4db0-b533-9db2b7d86a9b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2114.474966] env[63379]: INFO nova.compute.manager [None req-db621b10-b7ca-4db0-b533-9db2b7d86a9b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Took 1.16 seconds to destroy the instance on the hypervisor. [ 2114.475232] env[63379]: DEBUG oslo.service.loopingcall [None req-db621b10-b7ca-4db0-b533-9db2b7d86a9b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2114.475429] env[63379]: DEBUG nova.compute.manager [-] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2114.475526] env[63379]: DEBUG nova.network.neutron [-] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2114.537297] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-443a422f-b1b2-47e0-9bc2-e5e415f74129 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.545606] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c47c962-a446-4042-bd5a-66487dfed801 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.576424] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cfa0804-4936-4baf-94d0-249b0c8ccd71 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.583064] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78d8709b-9528-4fb3-b048-75bdb1d57c98 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.595989] env[63379]: DEBUG nova.compute.provider_tree [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 
tempest-ServerActionsTestOtherB-1503948534-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2114.715296] env[63379]: DEBUG nova.compute.manager [req-852fd314-abe3-4306-ac09-6f3fa7850414 req-19b1e6be-3275-4f85-9664-ac39022c2035 service nova] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Received event network-vif-deleted-89e016a5-2e49-4917-a716-ac799ceeda69 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2114.715545] env[63379]: INFO nova.compute.manager [req-852fd314-abe3-4306-ac09-6f3fa7850414 req-19b1e6be-3275-4f85-9664-ac39022c2035 service nova] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Neutron deleted interface 89e016a5-2e49-4917-a716-ac799ceeda69; detaching it from the instance and deleting it from the info cache [ 2114.715688] env[63379]: DEBUG nova.network.neutron [req-852fd314-abe3-4306-ac09-6f3fa7850414 req-19b1e6be-3275-4f85-9664-ac39022c2035 service nova] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2115.099009] env[63379]: DEBUG nova.scheduler.client.report [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2115.189149] env[63379]: DEBUG nova.network.neutron [-] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2115.217813] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4e565356-99c7-4a4f-beb8-8d5c8a2a6cb5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.227251] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f54f18e-53ce-4ec3-a56f-faef87d9f38a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.251888] env[63379]: DEBUG nova.compute.manager [req-852fd314-abe3-4306-ac09-6f3fa7850414 req-19b1e6be-3275-4f85-9664-ac39022c2035 service nova] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Detach interface failed, port_id=89e016a5-2e49-4917-a716-ac799ceeda69, reason: Instance d9640bf2-0f88-4c0e-9e21-7ee00ee8800f could not be found. 
{{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 2115.604880] env[63379]: DEBUG oslo_concurrency.lockutils [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.128s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2115.633156] env[63379]: INFO nova.network.neutron [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Updating port f4822da6-3551-4e0b-937f-55536f9c7342 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 2115.691768] env[63379]: INFO nova.compute.manager [-] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Took 1.22 seconds to deallocate network for instance. [ 2116.198123] env[63379]: DEBUG oslo_concurrency.lockutils [None req-db621b10-b7ca-4db0-b533-9db2b7d86a9b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2116.198401] env[63379]: DEBUG oslo_concurrency.lockutils [None req-db621b10-b7ca-4db0-b533-9db2b7d86a9b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2116.198645] env[63379]: DEBUG nova.objects.instance [None req-db621b10-b7ca-4db0-b533-9db2b7d86a9b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lazy-loading 'resources' on Instance uuid d9640bf2-0f88-4c0e-9e21-7ee00ee8800f {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2116.751614] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4561a881-fdc0-488e-87af-3b60a5faa05f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.759382] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3316dfb-dea2-4d69-8d93-323b1fa91f13 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.790347] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70f414d0-06ed-4bff-b7dd-ed039fdbe728 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.797632] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82eb93b9-761d-4b18-ae2e-a515b1697702 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.810440] env[63379]: DEBUG nova.compute.provider_tree [None req-db621b10-b7ca-4db0-b533-9db2b7d86a9b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Inventory has not changed in ProviderTree for 
provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2117.064939] env[63379]: DEBUG nova.compute.manager [req-a4e021da-96d2-4aa8-a3bb-d5a68aee12a3 req-0585cf80-04fc-437b-be93-89b537fa759c service nova] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Received event network-vif-plugged-f4822da6-3551-4e0b-937f-55536f9c7342 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2117.065537] env[63379]: DEBUG oslo_concurrency.lockutils [req-a4e021da-96d2-4aa8-a3bb-d5a68aee12a3 req-0585cf80-04fc-437b-be93-89b537fa759c service nova] Acquiring lock "7f0c426b-1ce3-469f-8ee1-6dd2178f014e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2117.065763] env[63379]: DEBUG oslo_concurrency.lockutils [req-a4e021da-96d2-4aa8-a3bb-d5a68aee12a3 req-0585cf80-04fc-437b-be93-89b537fa759c service nova] Lock "7f0c426b-1ce3-469f-8ee1-6dd2178f014e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2117.065942] env[63379]: DEBUG oslo_concurrency.lockutils [req-a4e021da-96d2-4aa8-a3bb-d5a68aee12a3 req-0585cf80-04fc-437b-be93-89b537fa759c service nova] Lock "7f0c426b-1ce3-469f-8ee1-6dd2178f014e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2117.066327] env[63379]: DEBUG nova.compute.manager [req-a4e021da-96d2-4aa8-a3bb-d5a68aee12a3 req-0585cf80-04fc-437b-be93-89b537fa759c service nova] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] No waiting events found dispatching network-vif-plugged-f4822da6-3551-4e0b-937f-55536f9c7342 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2117.066529] env[63379]: WARNING nova.compute.manager [req-a4e021da-96d2-4aa8-a3bb-d5a68aee12a3 req-0585cf80-04fc-437b-be93-89b537fa759c service nova] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Received unexpected event network-vif-plugged-f4822da6-3551-4e0b-937f-55536f9c7342 for instance with vm_state shelved_offloaded and task_state spawning. 
[ 2117.153262] env[63379]: DEBUG oslo_concurrency.lockutils [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "refresh_cache-7f0c426b-1ce3-469f-8ee1-6dd2178f014e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2117.153262] env[63379]: DEBUG oslo_concurrency.lockutils [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquired lock "refresh_cache-7f0c426b-1ce3-469f-8ee1-6dd2178f014e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2117.153262] env[63379]: DEBUG nova.network.neutron [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2117.314052] env[63379]: DEBUG nova.scheduler.client.report [None req-db621b10-b7ca-4db0-b533-9db2b7d86a9b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2117.818068] env[63379]: DEBUG oslo_concurrency.lockutils [None req-db621b10-b7ca-4db0-b533-9db2b7d86a9b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.620s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2117.837832] env[63379]: INFO nova.scheduler.client.report [None req-db621b10-b7ca-4db0-b533-9db2b7d86a9b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Deleted allocations for instance d9640bf2-0f88-4c0e-9e21-7ee00ee8800f [ 2117.845988] env[63379]: DEBUG nova.network.neutron [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Updating instance_info_cache with network_info: [{"id": "f4822da6-3551-4e0b-937f-55536f9c7342", "address": "fa:16:3e:69:f3:f8", "network": {"id": "0dd98be0-5b25-4e45-ac38-4b8d3cd9fc6c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-191573180-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"645f0e0a5e1a44d59ca9c85da49bb454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4822da6-35", "ovs_interfaceid": "f4822da6-3551-4e0b-937f-55536f9c7342", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2118.348882] env[63379]: DEBUG oslo_concurrency.lockutils [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Releasing lock "refresh_cache-7f0c426b-1ce3-469f-8ee1-6dd2178f014e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2118.351224] env[63379]: DEBUG oslo_concurrency.lockutils [None req-db621b10-b7ca-4db0-b533-9db2b7d86a9b tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "d9640bf2-0f88-4c0e-9e21-7ee00ee8800f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.037s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2118.373647] env[63379]: DEBUG nova.virt.hardware [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='2a567132cedee092cd71ffa334adf002',container_format='bare',created_at=2024-12-11T23:40:30Z,direct_url=,disk_format='vmdk',id=128c5dc2-4347-46b9-bcea-aa973ef8f8d7,min_disk=1,min_ram=0,name='tempest-ServerActionsTestOtherB-server-95528237-shelved',owner='645f0e0a5e1a44d59ca9c85da49bb454',properties=ImageMetaProps,protected=,size=31665152,status='active',tags=,updated_at=2024-12-11T23:40:43Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2118.373888] env[63379]: DEBUG nova.virt.hardware [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2118.374062] env[63379]: DEBUG nova.virt.hardware [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2118.374254] env[63379]: DEBUG nova.virt.hardware [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2118.374403] 
env[63379]: DEBUG nova.virt.hardware [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2118.374552] env[63379]: DEBUG nova.virt.hardware [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2118.374765] env[63379]: DEBUG nova.virt.hardware [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2118.374934] env[63379]: DEBUG nova.virt.hardware [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2118.375117] env[63379]: DEBUG nova.virt.hardware [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2118.375285] env[63379]: DEBUG nova.virt.hardware [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2118.375465] env[63379]: DEBUG nova.virt.hardware [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2118.376541] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80ac34c4-37a8-4f13-ac3c-6986cd256a55 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.384760] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-190e1618-1d49-4e2f-81a1-193bdf81f301 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.398702] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:69:f3:f8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f4822da6-3551-4e0b-937f-55536f9c7342', 'vif_model': 
'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2118.406097] env[63379]: DEBUG oslo.service.loopingcall [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2118.406562] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2118.406772] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a029c8f9-7f37-4758-a77c-e6bde616e2f0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.424505] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2118.424505] env[63379]: value = "task-1780559" [ 2118.424505] env[63379]: _type = "Task" [ 2118.424505] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2118.433235] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780559, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2118.934124] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780559, 'name': CreateVM_Task, 'duration_secs': 0.315539} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2118.934302] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2118.934896] env[63379]: DEBUG oslo_concurrency.lockutils [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/128c5dc2-4347-46b9-bcea-aa973ef8f8d7" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2118.935080] env[63379]: DEBUG oslo_concurrency.lockutils [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquired lock "[datastore1] devstack-image-cache_base/128c5dc2-4347-46b9-bcea-aa973ef8f8d7" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2118.935469] env[63379]: DEBUG oslo_concurrency.lockutils [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/128c5dc2-4347-46b9-bcea-aa973ef8f8d7" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2118.935724] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-741d5046-ece3-4ff6-907f-076c7cda12fb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.939920] env[63379]: DEBUG oslo_vmware.api [None 
req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 2118.939920] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52605643-0ebb-4aa6-38d1-48b832f4862f" [ 2118.939920] env[63379]: _type = "Task" [ 2118.939920] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2118.946947] env[63379]: DEBUG oslo_vmware.api [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52605643-0ebb-4aa6-38d1-48b832f4862f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2119.090864] env[63379]: DEBUG nova.compute.manager [req-2291bb58-44ee-4b4c-ac77-75c4e0c9da94 req-f041f1e3-1d4e-4a00-a424-95209f0aa22f service nova] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Received event network-changed-f4822da6-3551-4e0b-937f-55536f9c7342 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2119.091095] env[63379]: DEBUG nova.compute.manager [req-2291bb58-44ee-4b4c-ac77-75c4e0c9da94 req-f041f1e3-1d4e-4a00-a424-95209f0aa22f service nova] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Refreshing instance network info cache due to event network-changed-f4822da6-3551-4e0b-937f-55536f9c7342. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 2119.091323] env[63379]: DEBUG oslo_concurrency.lockutils [req-2291bb58-44ee-4b4c-ac77-75c4e0c9da94 req-f041f1e3-1d4e-4a00-a424-95209f0aa22f service nova] Acquiring lock "refresh_cache-7f0c426b-1ce3-469f-8ee1-6dd2178f014e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2119.091475] env[63379]: DEBUG oslo_concurrency.lockutils [req-2291bb58-44ee-4b4c-ac77-75c4e0c9da94 req-f041f1e3-1d4e-4a00-a424-95209f0aa22f service nova] Acquired lock "refresh_cache-7f0c426b-1ce3-469f-8ee1-6dd2178f014e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2119.091658] env[63379]: DEBUG nova.network.neutron [req-2291bb58-44ee-4b4c-ac77-75c4e0c9da94 req-f041f1e3-1d4e-4a00-a424-95209f0aa22f service nova] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Refreshing network info cache for port f4822da6-3551-4e0b-937f-55536f9c7342 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2119.116763] env[63379]: DEBUG oslo_concurrency.lockutils [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "dc3117be-09c2-445e-a575-ff588d94238a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2119.117015] env[63379]: DEBUG oslo_concurrency.lockutils [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "dc3117be-09c2-445e-a575-ff588d94238a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2119.450041] env[63379]: DEBUG oslo_concurrency.lockutils [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Releasing lock "[datastore1] devstack-image-cache_base/128c5dc2-4347-46b9-bcea-aa973ef8f8d7" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2119.450394] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Processing image 128c5dc2-4347-46b9-bcea-aa973ef8f8d7 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2119.450508] env[63379]: DEBUG oslo_concurrency.lockutils [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/128c5dc2-4347-46b9-bcea-aa973ef8f8d7/128c5dc2-4347-46b9-bcea-aa973ef8f8d7.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2119.450687] env[63379]: DEBUG oslo_concurrency.lockutils [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquired lock "[datastore1] devstack-image-cache_base/128c5dc2-4347-46b9-bcea-aa973ef8f8d7/128c5dc2-4347-46b9-bcea-aa973ef8f8d7.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2119.450885] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2119.451146] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-411a3836-5234-4882-800c-925f22e90d45 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.459462] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2119.459462] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2119.460053] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5861c30f-2470-4fe0-936f-0cf331a1154c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.464615] env[63379]: DEBUG oslo_vmware.api [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 2119.464615] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a0f261-3ae2-005b-4e15-7542c30cfa9a" [ 2119.464615] env[63379]: _type = "Task" [ 2119.464615] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2119.473059] env[63379]: DEBUG oslo_vmware.api [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a0f261-3ae2-005b-4e15-7542c30cfa9a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2119.619252] env[63379]: DEBUG nova.compute.manager [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2119.801431] env[63379]: DEBUG nova.network.neutron [req-2291bb58-44ee-4b4c-ac77-75c4e0c9da94 req-f041f1e3-1d4e-4a00-a424-95209f0aa22f service nova] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Updated VIF entry in instance network info cache for port f4822da6-3551-4e0b-937f-55536f9c7342. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2119.801822] env[63379]: DEBUG nova.network.neutron [req-2291bb58-44ee-4b4c-ac77-75c4e0c9da94 req-f041f1e3-1d4e-4a00-a424-95209f0aa22f service nova] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Updating instance_info_cache with network_info: [{"id": "f4822da6-3551-4e0b-937f-55536f9c7342", "address": "fa:16:3e:69:f3:f8", "network": {"id": "0dd98be0-5b25-4e45-ac38-4b8d3cd9fc6c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-191573180-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "645f0e0a5e1a44d59ca9c85da49bb454", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4822da6-35", "ovs_interfaceid": "f4822da6-3551-4e0b-937f-55536f9c7342", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2119.975249] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Preparing fetch location {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2119.975486] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Fetch image to [datastore1] OSTACK_IMG_9132879d-544d-4108-bc84-30e5b8e1cddd/OSTACK_IMG_9132879d-544d-4108-bc84-30e5b8e1cddd.vmdk {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2119.975651] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Downloading stream optimized image 128c5dc2-4347-46b9-bcea-aa973ef8f8d7 to [datastore1] OSTACK_IMG_9132879d-544d-4108-bc84-30e5b8e1cddd/OSTACK_IMG_9132879d-544d-4108-bc84-30e5b8e1cddd.vmdk on the data store datastore1 as vApp {{(pid=63379) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 2119.975830] env[63379]: DEBUG nova.virt.vmwareapi.images [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Downloading image file data 128c5dc2-4347-46b9-bcea-aa973ef8f8d7 to the ESX as VM named 'OSTACK_IMG_9132879d-544d-4108-bc84-30e5b8e1cddd' {{(pid=63379) fetch_image_stream_optimized 
/opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 2120.043279] env[63379]: DEBUG oslo_vmware.rw_handles [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 2120.043279] env[63379]: value = "resgroup-9" [ 2120.043279] env[63379]: _type = "ResourcePool" [ 2120.043279] env[63379]: }. {{(pid=63379) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 2120.043556] env[63379]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-8f583f92-01e4-4981-9fb3-f77da56519cb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.063459] env[63379]: DEBUG oslo_vmware.rw_handles [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lease: (returnval){ [ 2120.063459] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f6c50a-fe4b-9939-b96b-ed3a026ee6cb" [ 2120.063459] env[63379]: _type = "HttpNfcLease" [ 2120.063459] env[63379]: } obtained for vApp import into resource pool (val){ [ 2120.063459] env[63379]: value = "resgroup-9" [ 2120.063459] env[63379]: _type = "ResourcePool" [ 2120.063459] env[63379]: }. {{(pid=63379) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 2120.063901] env[63379]: DEBUG oslo_vmware.api [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the lease: (returnval){ [ 2120.063901] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f6c50a-fe4b-9939-b96b-ed3a026ee6cb" [ 2120.063901] env[63379]: _type = "HttpNfcLease" [ 2120.063901] env[63379]: } to be ready. {{(pid=63379) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2120.070555] env[63379]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2120.070555] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f6c50a-fe4b-9939-b96b-ed3a026ee6cb" [ 2120.070555] env[63379]: _type = "HttpNfcLease" [ 2120.070555] env[63379]: } is initializing. 
{{(pid=63379) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2120.142180] env[63379]: DEBUG oslo_concurrency.lockutils [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2120.142455] env[63379]: DEBUG oslo_concurrency.lockutils [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2120.143965] env[63379]: INFO nova.compute.claims [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2120.304335] env[63379]: DEBUG oslo_concurrency.lockutils [req-2291bb58-44ee-4b4c-ac77-75c4e0c9da94 req-f041f1e3-1d4e-4a00-a424-95209f0aa22f service nova] Releasing lock "refresh_cache-7f0c426b-1ce3-469f-8ee1-6dd2178f014e" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2120.572072] env[63379]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2120.572072] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f6c50a-fe4b-9939-b96b-ed3a026ee6cb" [ 2120.572072] env[63379]: _type = "HttpNfcLease" [ 2120.572072] env[63379]: } is ready. {{(pid=63379) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2120.572544] env[63379]: DEBUG oslo_vmware.rw_handles [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2120.572544] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f6c50a-fe4b-9939-b96b-ed3a026ee6cb" [ 2120.572544] env[63379]: _type = "HttpNfcLease" [ 2120.572544] env[63379]: }. {{(pid=63379) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 2120.573018] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c4a7236-41a1-4ac0-99e4-3682d2f095eb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.580171] env[63379]: DEBUG oslo_vmware.rw_handles [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5293c03d-e486-e6b9-daf5-99a8cf74b693/disk-0.vmdk from lease info. 
{{(pid=63379) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2120.580348] env[63379]: DEBUG oslo_vmware.rw_handles [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Creating HTTP connection to write to file with size = 31665152 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5293c03d-e486-e6b9-daf5-99a8cf74b693/disk-0.vmdk. {{(pid=63379) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2120.644107] env[63379]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-1a9c0f91-6d47-49b1-b169-0a5094ca3eb0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.198985] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-862cf61f-750e-4f06-af4d-0e811908f53d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.207466] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-821ef23b-364b-4940-ac2c-48381ef3c306 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.239683] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-687f81e2-75a9-4cb4-9c70-b51f8cb07fa1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.247495] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f56a8498-9d13-454c-925e-d5772e3820ae {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.261710] env[63379]: DEBUG nova.compute.provider_tree [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2121.755323] env[63379]: DEBUG oslo_vmware.rw_handles [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Completed reading data from the image iterator. {{(pid=63379) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2121.755859] env[63379]: DEBUG oslo_vmware.rw_handles [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5293c03d-e486-e6b9-daf5-99a8cf74b693/disk-0.vmdk. 
{{(pid=63379) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2121.756642] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df0371a5-0785-4f3d-8c0e-a3dadee40905 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.764628] env[63379]: DEBUG nova.scheduler.client.report [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2121.767674] env[63379]: DEBUG oslo_vmware.rw_handles [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5293c03d-e486-e6b9-daf5-99a8cf74b693/disk-0.vmdk is in state: ready. {{(pid=63379) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2121.767857] env[63379]: DEBUG oslo_vmware.rw_handles [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5293c03d-e486-e6b9-daf5-99a8cf74b693/disk-0.vmdk. {{(pid=63379) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 2121.768307] env[63379]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-86d31784-3dbe-43a5-aef3-3e9946dded62 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.955651] env[63379]: DEBUG oslo_vmware.rw_handles [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5293c03d-e486-e6b9-daf5-99a8cf74b693/disk-0.vmdk. 
{{(pid=63379) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 2121.955882] env[63379]: INFO nova.virt.vmwareapi.images [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Downloaded image file data 128c5dc2-4347-46b9-bcea-aa973ef8f8d7 [ 2121.956649] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5a935ef-c08b-4f3b-8698-842c33309c86 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.972448] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8b4e28d5-0c3d-49ba-afb5-4bb6a23fc76d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.008974] env[63379]: INFO nova.virt.vmwareapi.images [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] The imported VM was unregistered [ 2122.011464] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Caching image {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2122.011707] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Creating directory with path [datastore1] devstack-image-cache_base/128c5dc2-4347-46b9-bcea-aa973ef8f8d7 {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2122.011972] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d1313908-24da-4371-a2ac-8102c540acd7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.224645] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Created directory with path [datastore1] devstack-image-cache_base/128c5dc2-4347-46b9-bcea-aa973ef8f8d7 {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2122.224842] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_9132879d-544d-4108-bc84-30e5b8e1cddd/OSTACK_IMG_9132879d-544d-4108-bc84-30e5b8e1cddd.vmdk to [datastore1] devstack-image-cache_base/128c5dc2-4347-46b9-bcea-aa973ef8f8d7/128c5dc2-4347-46b9-bcea-aa973ef8f8d7.vmdk. 
{{(pid=63379) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 2122.225101] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-8b175ed8-3bf8-46b3-a5bf-0cb3432239c7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.232662] env[63379]: DEBUG oslo_vmware.api [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 2122.232662] env[63379]: value = "task-1780562" [ 2122.232662] env[63379]: _type = "Task" [ 2122.232662] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2122.239697] env[63379]: DEBUG oslo_vmware.api [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780562, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2122.269599] env[63379]: DEBUG oslo_concurrency.lockutils [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.127s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2122.270146] env[63379]: DEBUG nova.compute.manager [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2122.743484] env[63379]: DEBUG oslo_vmware.api [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780562, 'name': MoveVirtualDisk_Task} progress is 9%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2122.775164] env[63379]: DEBUG nova.compute.utils [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2122.776657] env[63379]: DEBUG nova.compute.manager [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Allocating IP information in the background. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2122.776835] env[63379]: DEBUG nova.network.neutron [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: dc3117be-09c2-445e-a575-ff588d94238a] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2122.827365] env[63379]: DEBUG nova.policy [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f2e7c2125f0044508dc4016c4de224e2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9746ae945355479fa5880802e08d2b0a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 2123.077887] env[63379]: DEBUG nova.network.neutron [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Successfully created port: 31cbb903-9f03-4e2c-bbd3-7338b8533889 {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2123.244794] env[63379]: DEBUG oslo_vmware.api [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780562, 'name': MoveVirtualDisk_Task} progress is 18%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2123.280834] env[63379]: DEBUG nova.compute.manager [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Start building block device mappings for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2123.744529] env[63379]: DEBUG oslo_vmware.api [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780562, 'name': MoveVirtualDisk_Task} progress is 29%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2124.245911] env[63379]: DEBUG oslo_vmware.api [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780562, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2124.291379] env[63379]: DEBUG nova.compute.manager [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2124.318799] env[63379]: DEBUG nova.virt.hardware [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2124.319083] env[63379]: DEBUG nova.virt.hardware [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2124.319273] env[63379]: DEBUG nova.virt.hardware [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2124.319467] env[63379]: DEBUG nova.virt.hardware [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2124.319628] env[63379]: DEBUG nova.virt.hardware [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2124.319824] env[63379]: DEBUG nova.virt.hardware [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2124.320099] env[63379]: DEBUG nova.virt.hardware [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2124.320287] env[63379]: DEBUG nova.virt.hardware [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2124.320460] env[63379]: DEBUG nova.virt.hardware [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 
tempest-ServersTestJSON-1933653091-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2124.320627] env[63379]: DEBUG nova.virt.hardware [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2124.320802] env[63379]: DEBUG nova.virt.hardware [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2124.321725] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34acca05-d7e0-464d-808f-beb9320d3387 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.329579] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e29eeef8-a97f-4fb2-adbe-ea889adaa3fd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.485286] env[63379]: DEBUG nova.compute.manager [req-ce123274-f6db-442c-ad89-647a8ff0408e req-c1040518-c8f7-46ef-bca4-976970f69996 service nova] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Received event network-vif-plugged-31cbb903-9f03-4e2c-bbd3-7338b8533889 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2124.485669] env[63379]: DEBUG oslo_concurrency.lockutils [req-ce123274-f6db-442c-ad89-647a8ff0408e req-c1040518-c8f7-46ef-bca4-976970f69996 service nova] Acquiring lock "dc3117be-09c2-445e-a575-ff588d94238a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2124.485722] env[63379]: DEBUG oslo_concurrency.lockutils [req-ce123274-f6db-442c-ad89-647a8ff0408e req-c1040518-c8f7-46ef-bca4-976970f69996 service nova] Lock "dc3117be-09c2-445e-a575-ff588d94238a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2124.485878] env[63379]: DEBUG oslo_concurrency.lockutils [req-ce123274-f6db-442c-ad89-647a8ff0408e req-c1040518-c8f7-46ef-bca4-976970f69996 service nova] Lock "dc3117be-09c2-445e-a575-ff588d94238a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2124.486092] env[63379]: DEBUG nova.compute.manager [req-ce123274-f6db-442c-ad89-647a8ff0408e req-c1040518-c8f7-46ef-bca4-976970f69996 service nova] [instance: dc3117be-09c2-445e-a575-ff588d94238a] No waiting events found dispatching network-vif-plugged-31cbb903-9f03-4e2c-bbd3-7338b8533889 {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2124.486274] env[63379]: WARNING nova.compute.manager [req-ce123274-f6db-442c-ad89-647a8ff0408e req-c1040518-c8f7-46ef-bca4-976970f69996 service nova] [instance: dc3117be-09c2-445e-a575-ff588d94238a] 
Received unexpected event network-vif-plugged-31cbb903-9f03-4e2c-bbd3-7338b8533889 for instance with vm_state building and task_state spawning. [ 2124.575790] env[63379]: DEBUG nova.network.neutron [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Successfully updated port: 31cbb903-9f03-4e2c-bbd3-7338b8533889 {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2124.746609] env[63379]: DEBUG oslo_vmware.api [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780562, 'name': MoveVirtualDisk_Task} progress is 54%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2125.079441] env[63379]: DEBUG oslo_concurrency.lockutils [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "refresh_cache-dc3117be-09c2-445e-a575-ff588d94238a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2125.079441] env[63379]: DEBUG oslo_concurrency.lockutils [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquired lock "refresh_cache-dc3117be-09c2-445e-a575-ff588d94238a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2125.079540] env[63379]: DEBUG nova.network.neutron [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2125.247273] env[63379]: DEBUG oslo_vmware.api [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780562, 'name': MoveVirtualDisk_Task} progress is 69%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2125.619273] env[63379]: DEBUG nova.network.neutron [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Instance cache missing network info. {{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2125.748071] env[63379]: DEBUG oslo_vmware.api [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780562, 'name': MoveVirtualDisk_Task} progress is 85%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2125.759324] env[63379]: DEBUG nova.network.neutron [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Updating instance_info_cache with network_info: [{"id": "31cbb903-9f03-4e2c-bbd3-7338b8533889", "address": "fa:16:3e:b8:5e:f3", "network": {"id": "13b14fc1-6384-47ab-b623-f48d1ef0c41e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1646386679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9746ae945355479fa5880802e08d2b0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16c6ea68-9b0e-4ac0-a484-7a9a40533017", "external-id": "nsx-vlan-transportzone-384", "segmentation_id": 384, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31cbb903-9f", "ovs_interfaceid": "31cbb903-9f03-4e2c-bbd3-7338b8533889", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2126.247917] env[63379]: DEBUG oslo_vmware.api [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780562, 'name': MoveVirtualDisk_Task, 'duration_secs': 3.964293} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2126.248273] env[63379]: INFO nova.virt.vmwareapi.ds_util [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_9132879d-544d-4108-bc84-30e5b8e1cddd/OSTACK_IMG_9132879d-544d-4108-bc84-30e5b8e1cddd.vmdk to [datastore1] devstack-image-cache_base/128c5dc2-4347-46b9-bcea-aa973ef8f8d7/128c5dc2-4347-46b9-bcea-aa973ef8f8d7.vmdk. 
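The records above trace Nova's VMware image-cache fill for instance 7f0c426b: the cached VMDK path is guarded by an oslo.concurrency lock, the image is imported as a stream-optimized vApp over an HttpNfcLease, the temporary VM is unregistered, and a MoveVirtualDisk_Task relocates the disk into devstack-image-cache_base. A minimal Python sketch of that lock-then-fill pattern follows; it is an illustration named after the _fetch_image_if_missing frames in the trace, not Nova's implementation. The three callables (exists_in_cache, import_to_tmp, move_into_cache) are hypothetical stand-ins for the SearchDatastore_Task lookup, the HttpNfcLease/ImportVApp download and the disk move; only lockutils.lock and wait_for_task are real oslo APIs here.

    from oslo_concurrency import lockutils

    def fetch_image_if_missing(session, cache_vmdk_path, exists_in_cache,
                               import_to_tmp, move_into_cache):
        """Illustrative cache-fill pattern (not Nova's real code).

        session is assumed to be an oslo_vmware.api.VMwareAPISession; the
        three callables are hypothetical stand-ins for the datastore
        search, the vApp import and the disk move seen in the log.
        """
        # Same critical section as the
        # "[datastore1] devstack-image-cache_base/<image>/<image>.vmdk"
        # lock in the log: the first request fills the cache, concurrent
        # builds of the same image wait here instead of downloading again.
        with lockutils.lock(cache_vmdk_path):
            if exists_in_cache(cache_vmdk_path):
                # Cached copy already present (the SearchDatastore_Task hit).
                return cache_vmdk_path
            # Stream-optimized download (HttpNfcLease + ImportVApp in the log).
            tmp_vmdk = import_to_tmp()
            # Relocate into the cache and poll the resulting vCenter task,
            # as with MoveVirtualDisk_Task (task-1780562) above.
            task = move_into_cache(tmp_vmdk, cache_vmdk_path)
            session.wait_for_task(task)
            return cache_vmdk_path
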
[ 2126.248385] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Cleaning up location [datastore1] OSTACK_IMG_9132879d-544d-4108-bc84-30e5b8e1cddd {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 2126.248559] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_9132879d-544d-4108-bc84-30e5b8e1cddd {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2126.248907] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-57311007-16f1-45c9-9d32-58cfc810e935 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.255133] env[63379]: DEBUG oslo_vmware.api [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 2126.255133] env[63379]: value = "task-1780563" [ 2126.255133] env[63379]: _type = "Task" [ 2126.255133] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2126.262673] env[63379]: DEBUG oslo_concurrency.lockutils [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Releasing lock "refresh_cache-dc3117be-09c2-445e-a575-ff588d94238a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2126.262969] env[63379]: DEBUG nova.compute.manager [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Instance network_info: |[{"id": "31cbb903-9f03-4e2c-bbd3-7338b8533889", "address": "fa:16:3e:b8:5e:f3", "network": {"id": "13b14fc1-6384-47ab-b623-f48d1ef0c41e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1646386679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9746ae945355479fa5880802e08d2b0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16c6ea68-9b0e-4ac0-a484-7a9a40533017", "external-id": "nsx-vlan-transportzone-384", "segmentation_id": 384, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31cbb903-9f", "ovs_interfaceid": "31cbb903-9f03-4e2c-bbd3-7338b8533889", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2126.263243] env[63379]: DEBUG oslo_vmware.api [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 
tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780563, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2126.263574] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b8:5e:f3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '16c6ea68-9b0e-4ac0-a484-7a9a40533017', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '31cbb903-9f03-4e2c-bbd3-7338b8533889', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2126.270921] env[63379]: DEBUG oslo.service.loopingcall [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2126.271128] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2126.271337] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8590405e-1430-4b76-bbb4-1fa1143770de {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.289859] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2126.289859] env[63379]: value = "task-1780564" [ 2126.289859] env[63379]: _type = "Task" [ 2126.289859] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2126.296904] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780564, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2126.511083] env[63379]: DEBUG nova.compute.manager [req-baa1e14b-aa4e-449d-bde7-a6948b68e961 req-8179b0d5-5bb5-4fff-8efb-28c4e4338716 service nova] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Received event network-changed-31cbb903-9f03-4e2c-bbd3-7338b8533889 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2126.511288] env[63379]: DEBUG nova.compute.manager [req-baa1e14b-aa4e-449d-bde7-a6948b68e961 req-8179b0d5-5bb5-4fff-8efb-28c4e4338716 service nova] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Refreshing instance network info cache due to event network-changed-31cbb903-9f03-4e2c-bbd3-7338b8533889. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 2126.511604] env[63379]: DEBUG oslo_concurrency.lockutils [req-baa1e14b-aa4e-449d-bde7-a6948b68e961 req-8179b0d5-5bb5-4fff-8efb-28c4e4338716 service nova] Acquiring lock "refresh_cache-dc3117be-09c2-445e-a575-ff588d94238a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2126.511840] env[63379]: DEBUG oslo_concurrency.lockutils [req-baa1e14b-aa4e-449d-bde7-a6948b68e961 req-8179b0d5-5bb5-4fff-8efb-28c4e4338716 service nova] Acquired lock "refresh_cache-dc3117be-09c2-445e-a575-ff588d94238a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2126.512137] env[63379]: DEBUG nova.network.neutron [req-baa1e14b-aa4e-449d-bde7-a6948b68e961 req-8179b0d5-5bb5-4fff-8efb-28c4e4338716 service nova] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Refreshing network info cache for port 31cbb903-9f03-4e2c-bbd3-7338b8533889 {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2126.765533] env[63379]: DEBUG oslo_vmware.api [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780563, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.095297} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2126.765805] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2126.766068] env[63379]: DEBUG oslo_concurrency.lockutils [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Releasing lock "[datastore1] devstack-image-cache_base/128c5dc2-4347-46b9-bcea-aa973ef8f8d7/128c5dc2-4347-46b9-bcea-aa973ef8f8d7.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2126.766338] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/128c5dc2-4347-46b9-bcea-aa973ef8f8d7/128c5dc2-4347-46b9-bcea-aa973ef8f8d7.vmdk to [datastore1] 7f0c426b-1ce3-469f-8ee1-6dd2178f014e/7f0c426b-1ce3-469f-8ee1-6dd2178f014e.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2126.766593] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0286108a-95d0-4c30-a7f3-d640c0ae4d31 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.773045] env[63379]: DEBUG oslo_vmware.api [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 2126.773045] env[63379]: value = "task-1780565" [ 2126.773045] env[63379]: _type = "Task" [ 2126.773045] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2126.781068] env[63379]: DEBUG oslo_vmware.api [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780565, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2126.797834] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780564, 'name': CreateVM_Task, 'duration_secs': 0.366004} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2126.797996] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2126.798622] env[63379]: DEBUG oslo_concurrency.lockutils [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2126.798787] env[63379]: DEBUG oslo_concurrency.lockutils [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2126.799122] env[63379]: DEBUG oslo_concurrency.lockutils [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2126.799361] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38123627-e85d-4178-bf4a-822c4bfe9624 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.803330] env[63379]: DEBUG oslo_vmware.api [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2126.803330] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52cd68ed-88a2-99db-e0cb-06af39c9252e" [ 2126.803330] env[63379]: _type = "Task" [ 2126.803330] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2126.810155] env[63379]: DEBUG oslo_vmware.api [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52cd68ed-88a2-99db-e0cb-06af39c9252e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2127.269649] env[63379]: DEBUG nova.network.neutron [req-baa1e14b-aa4e-449d-bde7-a6948b68e961 req-8179b0d5-5bb5-4fff-8efb-28c4e4338716 service nova] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Updated VIF entry in instance network info cache for port 31cbb903-9f03-4e2c-bbd3-7338b8533889. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2127.270092] env[63379]: DEBUG nova.network.neutron [req-baa1e14b-aa4e-449d-bde7-a6948b68e961 req-8179b0d5-5bb5-4fff-8efb-28c4e4338716 service nova] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Updating instance_info_cache with network_info: [{"id": "31cbb903-9f03-4e2c-bbd3-7338b8533889", "address": "fa:16:3e:b8:5e:f3", "network": {"id": "13b14fc1-6384-47ab-b623-f48d1ef0c41e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1646386679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9746ae945355479fa5880802e08d2b0a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16c6ea68-9b0e-4ac0-a484-7a9a40533017", "external-id": "nsx-vlan-transportzone-384", "segmentation_id": 384, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31cbb903-9f", "ovs_interfaceid": "31cbb903-9f03-4e2c-bbd3-7338b8533889", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2127.283563] env[63379]: DEBUG oslo_vmware.api [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780565, 'name': CopyVirtualDisk_Task} progress is 9%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2127.313554] env[63379]: DEBUG oslo_vmware.api [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52cd68ed-88a2-99db-e0cb-06af39c9252e, 'name': SearchDatastore_Task, 'duration_secs': 0.050479} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2127.313781] env[63379]: DEBUG oslo_concurrency.lockutils [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2127.314017] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2127.314276] env[63379]: DEBUG oslo_concurrency.lockutils [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2127.314431] env[63379]: DEBUG oslo_concurrency.lockutils [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2127.314616] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2127.315230] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f2ff86f0-b8d2-43aa-95ec-a2c766d834db {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.331941] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2127.332186] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2127.332936] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2196bca-c365-4a51-ad51-ba41c200b235 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.338093] env[63379]: DEBUG oslo_vmware.api [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2127.338093] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f9ee23-8d69-25f8-e2ae-34f593b40ab3" [ 2127.338093] env[63379]: _type = "Task" [ 2127.338093] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2127.345920] env[63379]: DEBUG oslo_vmware.api [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f9ee23-8d69-25f8-e2ae-34f593b40ab3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2127.773801] env[63379]: DEBUG oslo_concurrency.lockutils [req-baa1e14b-aa4e-449d-bde7-a6948b68e961 req-8179b0d5-5bb5-4fff-8efb-28c4e4338716 service nova] Releasing lock "refresh_cache-dc3117be-09c2-445e-a575-ff588d94238a" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2127.783468] env[63379]: DEBUG oslo_vmware.api [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780565, 'name': CopyVirtualDisk_Task} progress is 32%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2127.849049] env[63379]: DEBUG oslo_vmware.api [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52f9ee23-8d69-25f8-e2ae-34f593b40ab3, 'name': SearchDatastore_Task, 'duration_secs': 0.057588} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2127.849924] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e343ae0-5a05-4c4a-b092-05d82bbbc7c6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.856387] env[63379]: DEBUG oslo_vmware.api [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2127.856387] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52bd6499-88be-6bbf-a472-d9d3d3612fa1" [ 2127.856387] env[63379]: _type = "Task" [ 2127.856387] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2127.866158] env[63379]: DEBUG oslo_vmware.api [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52bd6499-88be-6bbf-a472-d9d3d3612fa1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2128.284655] env[63379]: DEBUG oslo_vmware.api [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780565, 'name': CopyVirtualDisk_Task} progress is 52%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2128.368454] env[63379]: DEBUG oslo_vmware.api [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52bd6499-88be-6bbf-a472-d9d3d3612fa1, 'name': SearchDatastore_Task, 'duration_secs': 0.084396} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2128.368736] env[63379]: DEBUG oslo_concurrency.lockutils [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2128.368999] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] dc3117be-09c2-445e-a575-ff588d94238a/dc3117be-09c2-445e-a575-ff588d94238a.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2128.369296] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b79f5b9d-eee4-4f21-aba6-ea0031e76604 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.376041] env[63379]: DEBUG oslo_vmware.api [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2128.376041] env[63379]: value = "task-1780566" [ 2128.376041] env[63379]: _type = "Task" [ 2128.376041] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2128.384088] env[63379]: DEBUG oslo_vmware.api [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780566, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2128.784682] env[63379]: DEBUG oslo_vmware.api [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780565, 'name': CopyVirtualDisk_Task} progress is 71%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2128.885716] env[63379]: DEBUG oslo_vmware.api [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780566, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2129.286837] env[63379]: DEBUG oslo_vmware.api [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780565, 'name': CopyVirtualDisk_Task} progress is 91%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2129.387738] env[63379]: DEBUG oslo_vmware.api [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780566, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2129.788080] env[63379]: DEBUG oslo_vmware.api [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780565, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.715931} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2129.788375] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/128c5dc2-4347-46b9-bcea-aa973ef8f8d7/128c5dc2-4347-46b9-bcea-aa973ef8f8d7.vmdk to [datastore1] 7f0c426b-1ce3-469f-8ee1-6dd2178f014e/7f0c426b-1ce3-469f-8ee1-6dd2178f014e.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2129.789323] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d3a1823-bf45-4dc6-a3e1-6c12d7ea5f23 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.813854] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Reconfiguring VM instance instance-00000076 to attach disk [datastore1] 7f0c426b-1ce3-469f-8ee1-6dd2178f014e/7f0c426b-1ce3-469f-8ee1-6dd2178f014e.vmdk or device None with type streamOptimized {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2129.814276] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ff5fcc57-b954-4ec5-9ca3-2a8bb0ff02c0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.835104] env[63379]: DEBUG oslo_vmware.api [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 2129.835104] env[63379]: value = "task-1780567" [ 2129.835104] env[63379]: _type = "Task" [ 2129.835104] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2129.844684] env[63379]: DEBUG oslo_vmware.api [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780567, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2129.886723] env[63379]: DEBUG oslo_vmware.api [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780566, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.487225} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2129.887031] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] dc3117be-09c2-445e-a575-ff588d94238a/dc3117be-09c2-445e-a575-ff588d94238a.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2129.887241] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2129.887497] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eca135f2-5add-4fc3-936f-74f85759a970 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.894764] env[63379]: DEBUG oslo_vmware.api [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2129.894764] env[63379]: value = "task-1780568" [ 2129.894764] env[63379]: _type = "Task" [ 2129.894764] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2129.902608] env[63379]: DEBUG oslo_vmware.api [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780568, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2130.345016] env[63379]: DEBUG oslo_vmware.api [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780567, 'name': ReconfigVM_Task, 'duration_secs': 0.270776} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2130.345328] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Reconfigured VM instance instance-00000076 to attach disk [datastore1] 7f0c426b-1ce3-469f-8ee1-6dd2178f014e/7f0c426b-1ce3-469f-8ee1-6dd2178f014e.vmdk or device None with type streamOptimized {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2130.345927] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-59712a0a-d1ed-4cba-9b49-18714a097986 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.352459] env[63379]: DEBUG oslo_vmware.api [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 2130.352459] env[63379]: value = "task-1780569" [ 2130.352459] env[63379]: _type = "Task" [ 2130.352459] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2130.359574] env[63379]: DEBUG oslo_vmware.api [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780569, 'name': Rename_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2130.403919] env[63379]: DEBUG oslo_vmware.api [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780568, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063829} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2130.404234] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2130.405044] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d8c4327-aba4-4af6-89da-88cc79171431 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.428111] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Reconfiguring VM instance instance-0000007a to attach disk [datastore1] dc3117be-09c2-445e-a575-ff588d94238a/dc3117be-09c2-445e-a575-ff588d94238a.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2130.428396] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-49ce9503-2e5c-45b8-b36f-35f2015e0911 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.453841] env[63379]: DEBUG oslo_vmware.api [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2130.453841] env[63379]: value = "task-1780570" [ 2130.453841] env[63379]: _type = "Task" [ 2130.453841] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2130.465028] env[63379]: DEBUG oslo_vmware.api [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780570, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2130.861720] env[63379]: DEBUG oslo_vmware.api [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780569, 'name': Rename_Task, 'duration_secs': 0.188944} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2130.862015] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2130.863494] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-efaba4f7-bd63-4ac8-b2bc-c0c9b6887599 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.867944] env[63379]: DEBUG oslo_vmware.api [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 2130.867944] env[63379]: value = "task-1780571" [ 2130.867944] env[63379]: _type = "Task" [ 2130.867944] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2130.874897] env[63379]: DEBUG oslo_vmware.api [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780571, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2130.962949] env[63379]: DEBUG oslo_vmware.api [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780570, 'name': ReconfigVM_Task, 'duration_secs': 0.282124} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2130.963258] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Reconfigured VM instance instance-0000007a to attach disk [datastore1] dc3117be-09c2-445e-a575-ff588d94238a/dc3117be-09c2-445e-a575-ff588d94238a.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2130.963876] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6d5563c1-1f45-4414-8612-8278f9356d0b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.970227] env[63379]: DEBUG oslo_vmware.api [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2130.970227] env[63379]: value = "task-1780572" [ 2130.970227] env[63379]: _type = "Task" [ 2130.970227] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2130.982446] env[63379]: DEBUG oslo_vmware.api [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780572, 'name': Rename_Task} progress is 6%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2131.378721] env[63379]: DEBUG oslo_vmware.api [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780571, 'name': PowerOnVM_Task, 'duration_secs': 0.438647} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2131.379224] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2131.480867] env[63379]: DEBUG oslo_vmware.api [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780572, 'name': Rename_Task, 'duration_secs': 0.13524} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2131.481834] env[63379]: DEBUG nova.compute.manager [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2131.482138] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2131.482850] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c86dd901-d7fc-447d-a1e7-93e6091a4978 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2131.485242] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-82b0a61b-9ca3-4dbf-abad-0bed321a85d9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2131.493382] env[63379]: DEBUG oslo_vmware.api [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2131.493382] env[63379]: value = "task-1780573" [ 2131.493382] env[63379]: _type = "Task" [ 2131.493382] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2131.500530] env[63379]: DEBUG oslo_vmware.api [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780573, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2132.004729] env[63379]: DEBUG oslo_concurrency.lockutils [None req-178ecc1d-6abd-40e9-b04c-e892fbcb5356 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "7f0c426b-1ce3-469f-8ee1-6dd2178f014e" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 20.549s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2132.011125] env[63379]: DEBUG oslo_vmware.api [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780573, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2132.508518] env[63379]: DEBUG oslo_vmware.api [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780573, 'name': PowerOnVM_Task, 'duration_secs': 0.806905} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2132.508911] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2132.509041] env[63379]: INFO nova.compute.manager [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Took 8.22 seconds to spawn the instance on the hypervisor. [ 2132.509204] env[63379]: DEBUG nova.compute.manager [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2132.510033] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87e321d5-dd8e-4672-97a4-64bd096ee033 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.029175] env[63379]: INFO nova.compute.manager [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Took 12.90 seconds to build instance. 
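The CopyVirtualDisk_Task / ExtendVirtualDisk_Task / ReconfigVM_Task / Rename_Task / PowerOnVM_Task records above all follow the same oslo.vmware pattern: a *_Task vSphere call returns a task moref immediately, and the session then polls it, which is what produces the repeated "progress is N% ... completed successfully" lines. Below is a minimal sketch of that pattern, assuming an already-authenticated oslo.vmware VMwareAPISession; the endpoint, credentials, helper name, and arguments are illustrative placeholders, not Nova's vm_util code.

# Sketch only: invoke a vSphere *_Task method, then poll it to completion.
from oslo_vmware import api

session = api.VMwareAPISession(
    'vcenter.example.test', 'user', 'secret',        # placeholder endpoint/credentials
    api_retry_count=10, task_poll_interval=0.5)      # poll interval drives the progress records

def copy_then_extend(datacenter, src_vmdk, dst_vmdk, new_capacity_kb):
    """Illustrative helper mirroring the copy + extend sequence seen in the log."""
    disk_mgr = session.vim.service_content.virtualDiskManager

    # CopyVirtualDisk_Task returns a task moref right away ...
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                              sourceName=src_vmdk, sourceDatacenter=datacenter,
                              destName=dst_vmdk, destDatacenter=datacenter)
    # ... and wait_for_task() polls it every task_poll_interval seconds until it
    # completes or raises on error.
    session.wait_for_task(task)

    task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
                              name=dst_vmdk, datacenter=datacenter,
                              newCapacityKb=new_capacity_kb, eagerZero=False)
    session.wait_for_task(task)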
[ 2133.409319] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cf355689-d6de-40af-9d6f-3b197d2e3cc8 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "7f0c426b-1ce3-469f-8ee1-6dd2178f014e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2133.409580] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cf355689-d6de-40af-9d6f-3b197d2e3cc8 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "7f0c426b-1ce3-469f-8ee1-6dd2178f014e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2133.409920] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cf355689-d6de-40af-9d6f-3b197d2e3cc8 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "7f0c426b-1ce3-469f-8ee1-6dd2178f014e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2133.410155] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cf355689-d6de-40af-9d6f-3b197d2e3cc8 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "7f0c426b-1ce3-469f-8ee1-6dd2178f014e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2133.410341] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cf355689-d6de-40af-9d6f-3b197d2e3cc8 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "7f0c426b-1ce3-469f-8ee1-6dd2178f014e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2133.412435] env[63379]: INFO nova.compute.manager [None req-cf355689-d6de-40af-9d6f-3b197d2e3cc8 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Terminating instance [ 2133.414207] env[63379]: DEBUG nova.compute.manager [None req-cf355689-d6de-40af-9d6f-3b197d2e3cc8 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2133.414409] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cf355689-d6de-40af-9d6f-3b197d2e3cc8 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2133.415256] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba42d406-ac1c-4204-aead-99bb933b21ee {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.423229] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf355689-d6de-40af-9d6f-3b197d2e3cc8 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2133.423457] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a740bb87-38b1-417f-a942-682696109e8e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.429230] env[63379]: DEBUG oslo_vmware.api [None req-cf355689-d6de-40af-9d6f-3b197d2e3cc8 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 2133.429230] env[63379]: value = "task-1780574" [ 2133.429230] env[63379]: _type = "Task" [ 2133.429230] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2133.436549] env[63379]: DEBUG oslo_vmware.api [None req-cf355689-d6de-40af-9d6f-3b197d2e3cc8 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780574, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2133.531444] env[63379]: DEBUG oslo_concurrency.lockutils [None req-244ed2c7-372c-49fa-b967-fb0072b6871c tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "dc3117be-09c2-445e-a575-ff588d94238a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.414s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2133.555213] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cab6bf00-2e8f-4e75-9e96-e45370e566f8 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "dc3117be-09c2-445e-a575-ff588d94238a" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2133.555468] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cab6bf00-2e8f-4e75-9e96-e45370e566f8 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "dc3117be-09c2-445e-a575-ff588d94238a" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2133.555654] env[63379]: DEBUG nova.compute.manager [None req-cab6bf00-2e8f-4e75-9e96-e45370e566f8 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2133.556583] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a490b36-b4c3-4bc6-81ff-30e7da293ae5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.563219] env[63379]: DEBUG nova.compute.manager [None req-cab6bf00-2e8f-4e75-9e96-e45370e566f8 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63379) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 2133.563770] env[63379]: DEBUG nova.objects.instance [None req-cab6bf00-2e8f-4e75-9e96-e45370e566f8 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lazy-loading 'flavor' on Instance uuid dc3117be-09c2-445e-a575-ff588d94238a {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2133.940257] env[63379]: DEBUG oslo_vmware.api [None req-cf355689-d6de-40af-9d6f-3b197d2e3cc8 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780574, 'name': PowerOffVM_Task, 'duration_secs': 0.21246} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2133.940553] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf355689-d6de-40af-9d6f-3b197d2e3cc8 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2133.940727] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cf355689-d6de-40af-9d6f-3b197d2e3cc8 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2133.940982] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c08816a1-7db8-42c9-a393-367996303c6c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.010022] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cf355689-d6de-40af-9d6f-3b197d2e3cc8 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2134.010022] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cf355689-d6de-40af-9d6f-3b197d2e3cc8 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2134.010022] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf355689-d6de-40af-9d6f-3b197d2e3cc8 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Deleting the datastore file [datastore1] 7f0c426b-1ce3-469f-8ee1-6dd2178f014e {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2134.010022] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-22465adf-f091-4cd5-9023-25a989d15e5d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.015174] env[63379]: DEBUG oslo_vmware.api [None req-cf355689-d6de-40af-9d6f-3b197d2e3cc8 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for the task: (returnval){ [ 2134.015174] env[63379]: value = "task-1780576" [ 2134.015174] env[63379]: _type = "Task" [ 2134.015174] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2134.022842] env[63379]: DEBUG oslo_vmware.api [None req-cf355689-d6de-40af-9d6f-3b197d2e3cc8 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780576, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2134.071908] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-cab6bf00-2e8f-4e75-9e96-e45370e566f8 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2134.072198] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0aa49ee1-973e-4156-afb7-fc4ae66ca9a8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.078206] env[63379]: DEBUG oslo_vmware.api [None req-cab6bf00-2e8f-4e75-9e96-e45370e566f8 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2134.078206] env[63379]: value = "task-1780577" [ 2134.078206] env[63379]: _type = "Task" [ 2134.078206] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2134.085713] env[63379]: DEBUG oslo_vmware.api [None req-cab6bf00-2e8f-4e75-9e96-e45370e566f8 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780577, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2134.524461] env[63379]: DEBUG oslo_vmware.api [None req-cf355689-d6de-40af-9d6f-3b197d2e3cc8 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Task: {'id': task-1780576, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.163021} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2134.524731] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf355689-d6de-40af-9d6f-3b197d2e3cc8 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2134.524924] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cf355689-d6de-40af-9d6f-3b197d2e3cc8 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2134.525132] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-cf355689-d6de-40af-9d6f-3b197d2e3cc8 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2134.525323] env[63379]: INFO nova.compute.manager [None req-cf355689-d6de-40af-9d6f-3b197d2e3cc8 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Took 1.11 seconds to destroy the instance on the hypervisor. 
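The "Acquiring lock ... / acquired ... waited Ns / released ... held Ns" triplets throughout this section come from oslo.concurrency: the messages tagged inner lockutils.py:402/407/421 are emitted by the synchronized decorator, while the ones tagged lock lockutils.py:310/313/331 come from the lock() context manager. A minimal sketch of both forms follows; the lock names and bodies are illustrative, not Nova's actual compute-manager code.

# Sketch only: the two oslo.concurrency locking forms visible in the log.
from oslo_concurrency import lockutils

# Decorator form: the per-instance locks above use the instance UUID as the lock
# name, so build, stop and terminate paths for the same instance serialise on it.
@lockutils.synchronized('dc3117be-09c2-445e-a575-ff588d94238a')
def do_terminate_instance():
    pass  # shutdown and cleanup would run while the lock is held

# Context-manager form: used for image-cache datastore paths so only one request
# at a time fetches or copies a given cached VMDK.
with lockutils.lock('[datastore1] devstack-image-cache_base/<image-id>.vmdk'):
    pass  # copy the cached disk for the new instance here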
[ 2134.525567] env[63379]: DEBUG oslo.service.loopingcall [None req-cf355689-d6de-40af-9d6f-3b197d2e3cc8 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2134.525765] env[63379]: DEBUG nova.compute.manager [-] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2134.525867] env[63379]: DEBUG nova.network.neutron [-] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2134.587382] env[63379]: DEBUG oslo_vmware.api [None req-cab6bf00-2e8f-4e75-9e96-e45370e566f8 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780577, 'name': PowerOffVM_Task, 'duration_secs': 0.202122} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2134.587708] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-cab6bf00-2e8f-4e75-9e96-e45370e566f8 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2134.587846] env[63379]: DEBUG nova.compute.manager [None req-cab6bf00-2e8f-4e75-9e96-e45370e566f8 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2134.588647] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d17b7799-3b78-474e-a5cd-be930431eb86 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.927803] env[63379]: DEBUG nova.compute.manager [req-4a194c4e-d932-468e-a7d5-bce8bf199d0f req-fefbac0d-ea39-4881-a726-cae347e3dc53 service nova] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Received event network-vif-deleted-f4822da6-3551-4e0b-937f-55536f9c7342 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2134.927986] env[63379]: INFO nova.compute.manager [req-4a194c4e-d932-468e-a7d5-bce8bf199d0f req-fefbac0d-ea39-4881-a726-cae347e3dc53 service nova] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Neutron deleted interface f4822da6-3551-4e0b-937f-55536f9c7342; detaching it from the instance and deleting it from the info cache [ 2134.928180] env[63379]: DEBUG nova.network.neutron [req-4a194c4e-d932-468e-a7d5-bce8bf199d0f req-fefbac0d-ea39-4881-a726-cae347e3dc53 service nova] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2135.098940] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cab6bf00-2e8f-4e75-9e96-e45370e566f8 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "dc3117be-09c2-445e-a575-ff588d94238a" "released" by 
"nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.543s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2135.409509] env[63379]: DEBUG nova.network.neutron [-] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2135.431262] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fa9e6ca4-6f88-4232-b4bb-41aa5e2c7c70 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.441826] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2d67f31-8be3-4447-a9c4-ba1dd70a8328 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.468434] env[63379]: DEBUG nova.compute.manager [req-4a194c4e-d932-468e-a7d5-bce8bf199d0f req-fefbac0d-ea39-4881-a726-cae347e3dc53 service nova] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Detach interface failed, port_id=f4822da6-3551-4e0b-937f-55536f9c7342, reason: Instance 7f0c426b-1ce3-469f-8ee1-6dd2178f014e could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 2135.912373] env[63379]: INFO nova.compute.manager [-] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Took 1.39 seconds to deallocate network for instance. [ 2136.221602] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ad30d588-7c8a-4e3b-b65d-4eab65efa661 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "dc3117be-09c2-445e-a575-ff588d94238a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2136.221813] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ad30d588-7c8a-4e3b-b65d-4eab65efa661 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "dc3117be-09c2-445e-a575-ff588d94238a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2136.222074] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ad30d588-7c8a-4e3b-b65d-4eab65efa661 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "dc3117be-09c2-445e-a575-ff588d94238a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2136.222223] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ad30d588-7c8a-4e3b-b65d-4eab65efa661 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "dc3117be-09c2-445e-a575-ff588d94238a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2136.222403] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ad30d588-7c8a-4e3b-b65d-4eab65efa661 tempest-ServersTestJSON-1933653091 
tempest-ServersTestJSON-1933653091-project-member] Lock "dc3117be-09c2-445e-a575-ff588d94238a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2136.224464] env[63379]: INFO nova.compute.manager [None req-ad30d588-7c8a-4e3b-b65d-4eab65efa661 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Terminating instance [ 2136.226109] env[63379]: DEBUG nova.compute.manager [None req-ad30d588-7c8a-4e3b-b65d-4eab65efa661 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2136.226311] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-ad30d588-7c8a-4e3b-b65d-4eab65efa661 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2136.227179] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9ffce7c-5ee8-4343-8584-335b004634cf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.235253] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-ad30d588-7c8a-4e3b-b65d-4eab65efa661 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2136.235751] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3b305094-858f-465b-9993-12efbcfa5b19 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.312539] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-ad30d588-7c8a-4e3b-b65d-4eab65efa661 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2136.312766] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-ad30d588-7c8a-4e3b-b65d-4eab65efa661 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2136.312980] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad30d588-7c8a-4e3b-b65d-4eab65efa661 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Deleting the datastore file [datastore1] dc3117be-09c2-445e-a575-ff588d94238a {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2136.313228] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-93d77da2-703d-4928-8902-050ad89cbdbf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.318837] env[63379]: DEBUG oslo_vmware.api [None 
req-ad30d588-7c8a-4e3b-b65d-4eab65efa661 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2136.318837] env[63379]: value = "task-1780579" [ 2136.318837] env[63379]: _type = "Task" [ 2136.318837] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2136.326193] env[63379]: DEBUG oslo_vmware.api [None req-ad30d588-7c8a-4e3b-b65d-4eab65efa661 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780579, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2136.418941] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cf355689-d6de-40af-9d6f-3b197d2e3cc8 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2136.419125] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cf355689-d6de-40af-9d6f-3b197d2e3cc8 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2136.419447] env[63379]: DEBUG nova.objects.instance [None req-cf355689-d6de-40af-9d6f-3b197d2e3cc8 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lazy-loading 'resources' on Instance uuid 7f0c426b-1ce3-469f-8ee1-6dd2178f014e {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2136.827801] env[63379]: DEBUG oslo_vmware.api [None req-ad30d588-7c8a-4e3b-b65d-4eab65efa661 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780579, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.113188} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2136.828090] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad30d588-7c8a-4e3b-b65d-4eab65efa661 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2136.828286] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-ad30d588-7c8a-4e3b-b65d-4eab65efa661 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2136.828469] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-ad30d588-7c8a-4e3b-b65d-4eab65efa661 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2136.828645] env[63379]: INFO nova.compute.manager [None req-ad30d588-7c8a-4e3b-b65d-4eab65efa661 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Took 0.60 seconds to destroy the instance on the hypervisor. [ 2136.828887] env[63379]: DEBUG oslo.service.loopingcall [None req-ad30d588-7c8a-4e3b-b65d-4eab65efa661 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2136.829087] env[63379]: DEBUG nova.compute.manager [-] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2136.829189] env[63379]: DEBUG nova.network.neutron [-] [instance: dc3117be-09c2-445e-a575-ff588d94238a] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2137.055533] env[63379]: DEBUG nova.compute.manager [req-8519d91f-97a8-47fb-a922-496739c98f2a req-f260cc95-e97f-4374-be80-a275a45171ad service nova] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Received event network-vif-deleted-31cbb903-9f03-4e2c-bbd3-7338b8533889 {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2137.055765] env[63379]: INFO nova.compute.manager [req-8519d91f-97a8-47fb-a922-496739c98f2a req-f260cc95-e97f-4374-be80-a275a45171ad service nova] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Neutron deleted interface 31cbb903-9f03-4e2c-bbd3-7338b8533889; detaching it from the instance and deleting it from the info cache [ 2137.055971] env[63379]: DEBUG nova.network.neutron [req-8519d91f-97a8-47fb-a922-496739c98f2a req-f260cc95-e97f-4374-be80-a275a45171ad service nova] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2137.069729] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0a8bcf5-33cf-4e0c-af09-f86264beafea {{(pid=63379) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.078018] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69653f74-9f84-4ce8-a932-5a110d065c42 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.108217] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48fc4408-7ba2-4e8c-8b8a-cfc455955f1a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.115074] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ade48f6f-1922-48ef-acb6-06c41b30cc92 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.128044] env[63379]: DEBUG nova.compute.provider_tree [None req-cf355689-d6de-40af-9d6f-3b197d2e3cc8 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2137.532633] env[63379]: DEBUG nova.network.neutron [-] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2137.561175] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b02326c5-cc07-4b8c-a6bd-e2f68d9c4182 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.571027] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f5f2b2a-868c-4a25-82ed-3e6c66840216 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.595726] env[63379]: DEBUG nova.compute.manager [req-8519d91f-97a8-47fb-a922-496739c98f2a req-f260cc95-e97f-4374-be80-a275a45171ad service nova] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Detach interface failed, port_id=31cbb903-9f03-4e2c-bbd3-7338b8533889, reason: Instance dc3117be-09c2-445e-a575-ff588d94238a could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 2137.632130] env[63379]: DEBUG nova.scheduler.client.report [None req-cf355689-d6de-40af-9d6f-3b197d2e3cc8 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2138.034702] env[63379]: INFO nova.compute.manager [-] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Took 1.21 seconds to deallocate network for instance. 
[ 2138.136643] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cf355689-d6de-40af-9d6f-3b197d2e3cc8 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.717s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2138.154374] env[63379]: INFO nova.scheduler.client.report [None req-cf355689-d6de-40af-9d6f-3b197d2e3cc8 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Deleted allocations for instance 7f0c426b-1ce3-469f-8ee1-6dd2178f014e [ 2138.541512] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ad30d588-7c8a-4e3b-b65d-4eab65efa661 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2138.541667] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ad30d588-7c8a-4e3b-b65d-4eab65efa661 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2138.541891] env[63379]: DEBUG nova.objects.instance [None req-ad30d588-7c8a-4e3b-b65d-4eab65efa661 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lazy-loading 'resources' on Instance uuid dc3117be-09c2-445e-a575-ff588d94238a {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2138.661886] env[63379]: DEBUG oslo_concurrency.lockutils [None req-cf355689-d6de-40af-9d6f-3b197d2e3cc8 tempest-ServerActionsTestOtherB-1503948534 tempest-ServerActionsTestOtherB-1503948534-project-member] Lock "7f0c426b-1ce3-469f-8ee1-6dd2178f014e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.252s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2139.081057] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5748eb6-aa5f-44e4-a22b-a1febd7d2cc2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2139.088060] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20bcc4c0-6d78-4ea3-aef7-a4e166594b8b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2139.117107] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26da63e1-4ba3-42e4-912f-0d68e9a9b9db {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2139.123713] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0953404-ee4c-4108-b6a5-2dbeeb0ed808 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2139.136380] env[63379]: DEBUG nova.compute.provider_tree [None req-ad30d588-7c8a-4e3b-b65d-4eab65efa661 
tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2139.639308] env[63379]: DEBUG nova.scheduler.client.report [None req-ad30d588-7c8a-4e3b-b65d-4eab65efa661 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2140.144379] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ad30d588-7c8a-4e3b-b65d-4eab65efa661 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.602s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2140.160870] env[63379]: INFO nova.scheduler.client.report [None req-ad30d588-7c8a-4e3b-b65d-4eab65efa661 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Deleted allocations for instance dc3117be-09c2-445e-a575-ff588d94238a [ 2140.668497] env[63379]: DEBUG oslo_concurrency.lockutils [None req-ad30d588-7c8a-4e3b-b65d-4eab65efa661 tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "dc3117be-09c2-445e-a575-ff588d94238a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.446s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2141.934619] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d37a6a0a-65ff-4f76-9aa2-cdfbad730a4a tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "85ecb409-ab53-43d9-8120-2f8c7402d74c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2141.935057] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d37a6a0a-65ff-4f76-9aa2-cdfbad730a4a tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "85ecb409-ab53-43d9-8120-2f8c7402d74c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2141.935337] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d37a6a0a-65ff-4f76-9aa2-cdfbad730a4a tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "85ecb409-ab53-43d9-8120-2f8c7402d74c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2141.935579] env[63379]: DEBUG oslo_concurrency.lockutils [None 
req-d37a6a0a-65ff-4f76-9aa2-cdfbad730a4a tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "85ecb409-ab53-43d9-8120-2f8c7402d74c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2141.935796] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d37a6a0a-65ff-4f76-9aa2-cdfbad730a4a tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "85ecb409-ab53-43d9-8120-2f8c7402d74c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2141.938132] env[63379]: INFO nova.compute.manager [None req-d37a6a0a-65ff-4f76-9aa2-cdfbad730a4a tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Terminating instance [ 2141.940220] env[63379]: DEBUG nova.compute.manager [None req-d37a6a0a-65ff-4f76-9aa2-cdfbad730a4a tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2141.940452] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-d37a6a0a-65ff-4f76-9aa2-cdfbad730a4a tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2141.941394] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dc565da-4b39-4e6a-9707-b3ea8dc5040e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2141.949433] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-d37a6a0a-65ff-4f76-9aa2-cdfbad730a4a tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2141.949670] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f9bef85a-1399-42e0-963c-b32607881589 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2141.956054] env[63379]: DEBUG oslo_vmware.api [None req-d37a6a0a-65ff-4f76-9aa2-cdfbad730a4a tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2141.956054] env[63379]: value = "task-1780581" [ 2141.956054] env[63379]: _type = "Task" [ 2141.956054] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2141.963806] env[63379]: DEBUG oslo_vmware.api [None req-d37a6a0a-65ff-4f76-9aa2-cdfbad730a4a tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780581, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2142.466830] env[63379]: DEBUG oslo_vmware.api [None req-d37a6a0a-65ff-4f76-9aa2-cdfbad730a4a tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780581, 'name': PowerOffVM_Task, 'duration_secs': 0.19424} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2142.467129] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-d37a6a0a-65ff-4f76-9aa2-cdfbad730a4a tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2142.467339] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-d37a6a0a-65ff-4f76-9aa2-cdfbad730a4a tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2142.467588] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3a3a6d1e-e141-4af3-ae73-8ae5f277fb79 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.533682] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-d37a6a0a-65ff-4f76-9aa2-cdfbad730a4a tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2142.533906] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-d37a6a0a-65ff-4f76-9aa2-cdfbad730a4a tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2142.534114] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-d37a6a0a-65ff-4f76-9aa2-cdfbad730a4a tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Deleting the datastore file [datastore1] 85ecb409-ab53-43d9-8120-2f8c7402d74c {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2142.534383] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1e27d8fd-367c-40ca-8a78-51366aaed416 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.541686] env[63379]: DEBUG oslo_vmware.api [None req-d37a6a0a-65ff-4f76-9aa2-cdfbad730a4a tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for the task: (returnval){ [ 2142.541686] env[63379]: value = "task-1780583" [ 2142.541686] env[63379]: _type = "Task" [ 2142.541686] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2142.549317] env[63379]: DEBUG oslo_vmware.api [None req-d37a6a0a-65ff-4f76-9aa2-cdfbad730a4a tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780583, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2143.052303] env[63379]: DEBUG oslo_vmware.api [None req-d37a6a0a-65ff-4f76-9aa2-cdfbad730a4a tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Task: {'id': task-1780583, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.123803} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2143.052602] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-d37a6a0a-65ff-4f76-9aa2-cdfbad730a4a tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2143.052796] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-d37a6a0a-65ff-4f76-9aa2-cdfbad730a4a tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2143.052979] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-d37a6a0a-65ff-4f76-9aa2-cdfbad730a4a tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2143.053178] env[63379]: INFO nova.compute.manager [None req-d37a6a0a-65ff-4f76-9aa2-cdfbad730a4a tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Took 1.11 seconds to destroy the instance on the hypervisor. [ 2143.053429] env[63379]: DEBUG oslo.service.loopingcall [None req-d37a6a0a-65ff-4f76-9aa2-cdfbad730a4a tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2143.053623] env[63379]: DEBUG nova.compute.manager [-] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2143.053719] env[63379]: DEBUG nova.network.neutron [-] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2143.325428] env[63379]: DEBUG nova.compute.manager [req-b5f8c1fe-2c5c-4b4a-8946-c4cffb91a0dd req-928f72e4-4464-40e3-9b87-c3ce50cf9c7b service nova] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Received event network-vif-deleted-ef74ed84-a494-4ce8-a037-458fd0285f2b {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2143.325545] env[63379]: INFO nova.compute.manager [req-b5f8c1fe-2c5c-4b4a-8946-c4cffb91a0dd req-928f72e4-4464-40e3-9b87-c3ce50cf9c7b service nova] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Neutron deleted interface ef74ed84-a494-4ce8-a037-458fd0285f2b; detaching it from the instance and deleting it from the info cache [ 2143.325706] env[63379]: DEBUG nova.network.neutron [req-b5f8c1fe-2c5c-4b4a-8946-c4cffb91a0dd req-928f72e4-4464-40e3-9b87-c3ce50cf9c7b service nova] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2143.609669] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Acquiring lock "021d4755-9144-43c7-8c86-f167b7b294e4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2143.609920] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Lock "021d4755-9144-43c7-8c86-f167b7b294e4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2143.801752] env[63379]: DEBUG nova.network.neutron [-] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2143.828614] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b2a6d0fd-5d7c-4fa4-b09a-ad542455983d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.838099] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb27bf27-3193-4906-b9a5-cbb4da34be84 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.861162] env[63379]: DEBUG nova.compute.manager [req-b5f8c1fe-2c5c-4b4a-8946-c4cffb91a0dd req-928f72e4-4464-40e3-9b87-c3ce50cf9c7b service nova] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Detach interface failed, 
port_id=ef74ed84-a494-4ce8-a037-458fd0285f2b, reason: Instance 85ecb409-ab53-43d9-8120-2f8c7402d74c could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 2144.111908] env[63379]: DEBUG nova.compute.manager [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] [instance: 021d4755-9144-43c7-8c86-f167b7b294e4] Starting instance... {{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2144.305039] env[63379]: INFO nova.compute.manager [-] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Took 1.25 seconds to deallocate network for instance. [ 2144.633753] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2144.634038] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2144.635522] env[63379]: INFO nova.compute.claims [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] [instance: 021d4755-9144-43c7-8c86-f167b7b294e4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2144.812866] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d37a6a0a-65ff-4f76-9aa2-cdfbad730a4a tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2145.679957] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1be26b59-f86c-4acb-a279-cdef3faceb6b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.687583] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ccd1c76-89cc-460d-b6a1-e8aa53570d13 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.717128] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa6726cb-ab45-4dc7-9bdd-fbf5a314b8f0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.724138] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74c91fe2-ea1d-4d0a-93fa-1cb2da4c310d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.736745] env[63379]: DEBUG nova.compute.provider_tree [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 
tempest-ServersAaction247Test-607684453-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2146.240176] env[63379]: DEBUG nova.scheduler.client.report [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2146.746336] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.112s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2146.747143] env[63379]: DEBUG nova.compute.manager [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] [instance: 021d4755-9144-43c7-8c86-f167b7b294e4] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2146.751210] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d37a6a0a-65ff-4f76-9aa2-cdfbad730a4a tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.938s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2146.752016] env[63379]: DEBUG nova.objects.instance [None req-d37a6a0a-65ff-4f76-9aa2-cdfbad730a4a tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lazy-loading 'resources' on Instance uuid 85ecb409-ab53-43d9-8120-2f8c7402d74c {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2147.255288] env[63379]: DEBUG nova.compute.utils [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2147.259470] env[63379]: DEBUG nova.compute.manager [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] [instance: 021d4755-9144-43c7-8c86-f167b7b294e4] Not allocating networking since 'none' was specified. 
{{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 2147.293851] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c17572c9-797a-4bc1-a1e7-8bb4418b640a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.302068] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a12a760c-d66a-4f96-a3d1-156d55abea32 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.331055] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3570d9a2-d596-496b-bef6-7c05d8d7bf28 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.338189] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40305c42-e55b-4fa0-992a-a0ab858bf9c0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.351853] env[63379]: DEBUG nova.compute.provider_tree [None req-d37a6a0a-65ff-4f76-9aa2-cdfbad730a4a tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2147.760630] env[63379]: DEBUG nova.compute.manager [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] [instance: 021d4755-9144-43c7-8c86-f167b7b294e4] Start building block device mappings for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2147.854916] env[63379]: DEBUG nova.scheduler.client.report [None req-d37a6a0a-65ff-4f76-9aa2-cdfbad730a4a tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2148.359278] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d37a6a0a-65ff-4f76-9aa2-cdfbad730a4a tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.608s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2148.378029] env[63379]: INFO nova.scheduler.client.report [None req-d37a6a0a-65ff-4f76-9aa2-cdfbad730a4a tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Deleted allocations for instance 85ecb409-ab53-43d9-8120-2f8c7402d74c [ 2148.770687] env[63379]: DEBUG nova.compute.manager [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] [instance: 021d4755-9144-43c7-8c86-f167b7b294e4] Start spawning the instance on the hypervisor. 
{{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2148.797009] env[63379]: DEBUG nova.virt.hardware [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2148.797293] env[63379]: DEBUG nova.virt.hardware [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2148.797480] env[63379]: DEBUG nova.virt.hardware [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2148.797672] env[63379]: DEBUG nova.virt.hardware [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2148.797825] env[63379]: DEBUG nova.virt.hardware [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2148.797978] env[63379]: DEBUG nova.virt.hardware [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2148.798229] env[63379]: DEBUG nova.virt.hardware [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2148.798407] env[63379]: DEBUG nova.virt.hardware [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2148.798573] env[63379]: DEBUG nova.virt.hardware [None 
req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2148.798740] env[63379]: DEBUG nova.virt.hardware [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2148.798917] env[63379]: DEBUG nova.virt.hardware [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2148.799790] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-870f88fc-0ae0-44a0-b755-0e2c7f07f605 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.807765] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63e25c4b-f021-46ae-8cff-3911d0b47f2a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.820775] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] [instance: 021d4755-9144-43c7-8c86-f167b7b294e4] Instance VIF info [] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2148.826284] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Creating folder: Project (8be2d444999746b7a1c3315ff9aa6509). Parent ref: group-v369214. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2148.826701] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7b8c80c5-8d23-4453-b623-0baeb0c5312c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.836969] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Created folder: Project (8be2d444999746b7a1c3315ff9aa6509) in parent group-v369214. [ 2148.837168] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Creating folder: Instances. Parent ref: group-v369536. 
{{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2148.837379] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3595da8b-ea97-4bb3-9f97-bf83ce25aa30 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.846068] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Created folder: Instances in parent group-v369536. [ 2148.846298] env[63379]: DEBUG oslo.service.loopingcall [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2148.846480] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 021d4755-9144-43c7-8c86-f167b7b294e4] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2148.846706] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-babf236f-8bc1-43fe-8cf8-a88121c84c44 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.862722] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2148.862722] env[63379]: value = "task-1780586" [ 2148.862722] env[63379]: _type = "Task" [ 2148.862722] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2148.872269] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780586, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2148.885052] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d37a6a0a-65ff-4f76-9aa2-cdfbad730a4a tempest-ServersTestJSON-1933653091 tempest-ServersTestJSON-1933653091-project-member] Lock "85ecb409-ab53-43d9-8120-2f8c7402d74c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.950s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2149.373354] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780586, 'name': CreateVM_Task, 'duration_secs': 0.235412} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2149.373751] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 021d4755-9144-43c7-8c86-f167b7b294e4] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2149.373984] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2149.374122] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2149.374607] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2149.374730] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf43fb07-b1ca-464b-b041-5abeec9e9043 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.379524] env[63379]: DEBUG oslo_vmware.api [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Waiting for the task: (returnval){ [ 2149.379524] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b1653f-3cd5-6975-02fc-1c6a676a4776" [ 2149.379524] env[63379]: _type = "Task" [ 2149.379524] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2149.388413] env[63379]: DEBUG oslo_vmware.api [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b1653f-3cd5-6975-02fc-1c6a676a4776, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2149.892471] env[63379]: DEBUG oslo_vmware.api [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b1653f-3cd5-6975-02fc-1c6a676a4776, 'name': SearchDatastore_Task, 'duration_secs': 0.009202} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2149.892471] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2149.892471] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] [instance: 021d4755-9144-43c7-8c86-f167b7b294e4] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2149.892471] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2149.892471] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2149.892471] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2149.892471] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-681aabf5-9b4b-4f8a-a936-61ccd925bb31 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.903740] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2149.903972] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2149.904772] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f2acb6d-d9f4-4cbb-b593-b132ecbef915 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.910510] env[63379]: DEBUG oslo_vmware.api [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Waiting for the task: (returnval){ [ 2149.910510] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b2ce43-26d0-5813-70a7-6a9d470b710b" [ 2149.910510] env[63379]: _type = "Task" [ 2149.910510] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2149.919190] env[63379]: DEBUG oslo_vmware.api [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b2ce43-26d0-5813-70a7-6a9d470b710b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2150.422992] env[63379]: DEBUG oslo_vmware.api [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b2ce43-26d0-5813-70a7-6a9d470b710b, 'name': SearchDatastore_Task, 'duration_secs': 0.009391} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2150.423945] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77dbc734-3136-43d6-9b68-623ebddde857 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2150.429535] env[63379]: DEBUG oslo_vmware.api [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Waiting for the task: (returnval){ [ 2150.429535] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]525230f7-3ca1-0eef-0acf-cc22b2f3fbfc" [ 2150.429535] env[63379]: _type = "Task" [ 2150.429535] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2150.438912] env[63379]: DEBUG oslo_vmware.api [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]525230f7-3ca1-0eef-0acf-cc22b2f3fbfc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2150.946091] env[63379]: DEBUG oslo_vmware.api [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]525230f7-3ca1-0eef-0acf-cc22b2f3fbfc, 'name': SearchDatastore_Task, 'duration_secs': 0.011097} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2150.946091] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2150.946673] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 021d4755-9144-43c7-8c86-f167b7b294e4/021d4755-9144-43c7-8c86-f167b7b294e4.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2150.946673] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a63eab0b-72fd-4a2b-ba2c-6655fb9708c2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2150.955158] env[63379]: DEBUG oslo_vmware.api [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Waiting for the task: (returnval){ [ 2150.955158] env[63379]: value = "task-1780587" [ 2150.955158] env[63379]: _type = "Task" [ 2150.955158] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2150.967505] env[63379]: DEBUG oslo_vmware.api [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Task: {'id': task-1780587, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2151.465292] env[63379]: DEBUG oslo_vmware.api [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Task: {'id': task-1780587, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.463456} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2151.465497] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 021d4755-9144-43c7-8c86-f167b7b294e4/021d4755-9144-43c7-8c86-f167b7b294e4.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2151.465712] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] [instance: 021d4755-9144-43c7-8c86-f167b7b294e4] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2151.465965] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-57cf3d8c-5faa-4498-8a15-827f5c0348d4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2151.472206] env[63379]: DEBUG oslo_vmware.api [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Waiting for the task: (returnval){ [ 2151.472206] env[63379]: value = "task-1780588" [ 2151.472206] env[63379]: _type = "Task" [ 2151.472206] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2151.479097] env[63379]: DEBUG oslo_vmware.api [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Task: {'id': task-1780588, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2151.981791] env[63379]: DEBUG oslo_vmware.api [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Task: {'id': task-1780588, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062627} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2151.982180] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] [instance: 021d4755-9144-43c7-8c86-f167b7b294e4] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2151.982831] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c48da86a-eb03-4c5c-9c7a-795b95e676e3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2152.001847] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] [instance: 021d4755-9144-43c7-8c86-f167b7b294e4] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] 021d4755-9144-43c7-8c86-f167b7b294e4/021d4755-9144-43c7-8c86-f167b7b294e4.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2152.002072] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4967f616-04dc-4c5f-8c6d-986a074fee55 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2152.020392] env[63379]: DEBUG oslo_vmware.api [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Waiting for the task: (returnval){ [ 2152.020392] env[63379]: value = "task-1780589" [ 2152.020392] env[63379]: _type = "Task" [ 2152.020392] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2152.027519] env[63379]: DEBUG oslo_vmware.api [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Task: {'id': task-1780589, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2152.530384] env[63379]: DEBUG oslo_vmware.api [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Task: {'id': task-1780589, 'name': ReconfigVM_Task, 'duration_secs': 0.277452} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2152.530734] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] [instance: 021d4755-9144-43c7-8c86-f167b7b294e4] Reconfigured VM instance instance-0000007b to attach disk [datastore1] 021d4755-9144-43c7-8c86-f167b7b294e4/021d4755-9144-43c7-8c86-f167b7b294e4.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2152.531347] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-675a57d0-fceb-4857-816d-5f99fb699d87 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2152.537563] env[63379]: DEBUG oslo_vmware.api [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Waiting for the task: (returnval){ [ 2152.537563] env[63379]: value = "task-1780590" [ 2152.537563] env[63379]: _type = "Task" [ 2152.537563] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2152.544821] env[63379]: DEBUG oslo_vmware.api [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Task: {'id': task-1780590, 'name': Rename_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2153.047312] env[63379]: DEBUG oslo_vmware.api [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Task: {'id': task-1780590, 'name': Rename_Task, 'duration_secs': 0.124478} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2153.047707] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] [instance: 021d4755-9144-43c7-8c86-f167b7b294e4] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2153.047826] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-648afd9e-2026-4014-80fd-8fa1142542a8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2153.054165] env[63379]: DEBUG oslo_vmware.api [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Waiting for the task: (returnval){ [ 2153.054165] env[63379]: value = "task-1780591" [ 2153.054165] env[63379]: _type = "Task" [ 2153.054165] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2153.062415] env[63379]: DEBUG oslo_vmware.api [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Task: {'id': task-1780591, 'name': PowerOnVM_Task} progress is 0%. 
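Tasks 1780587 through 1780591 together trace the spawn path for instance 021d4755-9144-43c7-8c86-f167b7b294e4: copy the cached image VMDK, extend it to the flavor's root size, reconfigure the VM to attach the disk, rename the VM, and power it on. A hedged outline of that ordering; FakeSession and its method names are placeholders standing for the vCenter tasks observed in the log, not the driver's real helpers:

```python
class FakeSession:
    """Prints each step; the method names are placeholders for the vCenter
    tasks observed above (Copy/Extend/Reconfig/Rename/PowerOn)."""
    def __getattr__(self, name):
        return lambda *args, **kwargs: print(f"{name} {args} {kwargs}")


def spawn_from_cached_image(session, cached_vmdk, instance_vmdk, vm_ref, root_gb):
    session.copy_virtual_disk(cached_vmdk, instance_vmdk)                  # task-1780587
    session.extend_virtual_disk(instance_vmdk, root_gb * 1024 * 1024)      # task-1780588
    session.attach_disk_to_vm(vm_ref, instance_vmdk, disk_type="sparse")   # task-1780589
    session.rename_vm(vm_ref, "021d4755-9144-43c7-8c86-f167b7b294e4")      # task-1780590
    session.power_on_vm(vm_ref)                                            # task-1780591


# Illustrative paths and reference only.
spawn_from_cached_image(FakeSession(),
                        cached_vmdk="cache/d3d2d67c.vmdk",
                        instance_vmdk="021d4755/021d4755.vmdk",
                        vm_ref="vm-ref",
                        root_gb=1)
```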
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2153.564075] env[63379]: DEBUG oslo_vmware.api [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Task: {'id': task-1780591, 'name': PowerOnVM_Task, 'duration_secs': 0.390711} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2153.564540] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] [instance: 021d4755-9144-43c7-8c86-f167b7b294e4] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2153.564540] env[63379]: INFO nova.compute.manager [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] [instance: 021d4755-9144-43c7-8c86-f167b7b294e4] Took 4.79 seconds to spawn the instance on the hypervisor. [ 2153.564864] env[63379]: DEBUG nova.compute.manager [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] [instance: 021d4755-9144-43c7-8c86-f167b7b294e4] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2153.565487] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d95ebf5-ac25-44e3-a4b1-33762150b5ca {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2154.082055] env[63379]: INFO nova.compute.manager [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] [instance: 021d4755-9144-43c7-8c86-f167b7b294e4] Took 9.47 seconds to build instance. 
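The "Checking state" entries after power-on read the VM's runtime power state from vCenter (the surrounding RetrievePropertiesEx calls) and translate it into Nova's power-state codes. A sketch of that translation, assuming the standard vSphere runtime.powerState strings; the numeric codes are written out literally here rather than imported from Nova and should be treated as an assumption:

```python
# Illustrative mapping; the numeric codes follow nova.compute.power_state
# (NOSTATE=0, RUNNING=1, SHUTDOWN=4, SUSPENDED=7) and are assumed, not imported.
VSPHERE_TO_NOVA_POWER_STATE = {
    "poweredOn": 1,    # RUNNING
    "poweredOff": 4,   # SHUTDOWN
    "suspended": 7,    # SUSPENDED
}


def get_power_state(runtime_power_state: str) -> int:
    # Unknown strings fall back to 0 (NOSTATE).
    return VSPHERE_TO_NOVA_POWER_STATE.get(runtime_power_state, 0)


assert get_power_state("poweredOn") == 1
```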
[ 2154.520540] env[63379]: DEBUG nova.compute.manager [None req-536449e0-4033-48f2-b14b-78e801b99849 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] [instance: 021d4755-9144-43c7-8c86-f167b7b294e4] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2154.521552] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8752d8ec-f565-45c8-90a0-f4d6784c71a1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2154.575153] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f157294c-5826-4494-b21d-8cb3f542b02b tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Acquiring lock "021d4755-9144-43c7-8c86-f167b7b294e4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2154.584230] env[63379]: DEBUG oslo_concurrency.lockutils [None req-d6d40697-fc56-4814-8842-b02622590885 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Lock "021d4755-9144-43c7-8c86-f167b7b294e4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 10.974s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2154.584581] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f157294c-5826-4494-b21d-8cb3f542b02b tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Lock "021d4755-9144-43c7-8c86-f167b7b294e4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.010s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2154.584873] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f157294c-5826-4494-b21d-8cb3f542b02b tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Acquiring lock "021d4755-9144-43c7-8c86-f167b7b294e4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2154.585190] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f157294c-5826-4494-b21d-8cb3f542b02b tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Lock "021d4755-9144-43c7-8c86-f167b7b294e4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2154.585445] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f157294c-5826-4494-b21d-8cb3f542b02b tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Lock "021d4755-9144-43c7-8c86-f167b7b294e4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2154.587813] env[63379]: INFO nova.compute.manager [None req-f157294c-5826-4494-b21d-8cb3f542b02b tempest-ServersAaction247Test-607684453 
tempest-ServersAaction247Test-607684453-project-member] [instance: 021d4755-9144-43c7-8c86-f167b7b294e4] Terminating instance [ 2154.589833] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f157294c-5826-4494-b21d-8cb3f542b02b tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Acquiring lock "refresh_cache-021d4755-9144-43c7-8c86-f167b7b294e4" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2154.590071] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f157294c-5826-4494-b21d-8cb3f542b02b tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Acquired lock "refresh_cache-021d4755-9144-43c7-8c86-f167b7b294e4" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2154.590330] env[63379]: DEBUG nova.network.neutron [None req-f157294c-5826-4494-b21d-8cb3f542b02b tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] [instance: 021d4755-9144-43c7-8c86-f167b7b294e4] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2155.032556] env[63379]: INFO nova.compute.manager [None req-536449e0-4033-48f2-b14b-78e801b99849 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] [instance: 021d4755-9144-43c7-8c86-f167b7b294e4] instance snapshotting [ 2155.033184] env[63379]: DEBUG nova.objects.instance [None req-536449e0-4033-48f2-b14b-78e801b99849 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Lazy-loading 'flavor' on Instance uuid 021d4755-9144-43c7-8c86-f167b7b294e4 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2155.107691] env[63379]: DEBUG nova.network.neutron [None req-f157294c-5826-4494-b21d-8cb3f542b02b tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] [instance: 021d4755-9144-43c7-8c86-f167b7b294e4] Instance cache missing network info. 
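The lockutils entries in this stretch ("acquired ... waited 0.010s", "released ... held 10.974s") come from a wrapper that times how long a caller waited for a named lock and how long it held it. A simplified standard-library imitation of that logging (in-process locks only, none of oslo.concurrency's external or fair-lock handling):

```python
import threading
import time
from contextlib import contextmanager

_locks = {}   # name -> threading.Lock (sketch only; creation itself is unsynchronized)


@contextmanager
def timed_lock(name, owner):
    """Log waited/held times for a named in-process lock, in the spirit of
    the oslo_concurrency.lockutils messages in this log."""
    lock = _locks.setdefault(name, threading.Lock())
    print(f'Acquiring lock "{name}" by "{owner}"')
    t0 = time.monotonic()
    lock.acquire()
    print(f'Lock "{name}" acquired by "{owner}" :: waited {time.monotonic() - t0:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" "released" by "{owner}" :: held {time.monotonic() - t1:.3f}s')


with timed_lock("021d4755-9144-43c7-8c86-f167b7b294e4", "do_terminate_instance"):
    pass  # critical section
```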
{{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2155.163836] env[63379]: DEBUG nova.network.neutron [None req-f157294c-5826-4494-b21d-8cb3f542b02b tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] [instance: 021d4755-9144-43c7-8c86-f167b7b294e4] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2155.539132] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b74115e-f5fb-4563-8028-532b8e9a8e08 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2155.555983] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc77a1cf-ad16-4dd5-9223-1870fd8918b6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2155.608541] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2155.608749] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2155.666015] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f157294c-5826-4494-b21d-8cb3f542b02b tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Releasing lock "refresh_cache-021d4755-9144-43c7-8c86-f167b7b294e4" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2155.666477] env[63379]: DEBUG nova.compute.manager [None req-f157294c-5826-4494-b21d-8cb3f542b02b tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] [instance: 021d4755-9144-43c7-8c86-f167b7b294e4] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2155.666681] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f157294c-5826-4494-b21d-8cb3f542b02b tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] [instance: 021d4755-9144-43c7-8c86-f167b7b294e4] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2155.667566] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a91c06fa-698b-4be8-bfc4-38a60bcf9eaa {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2155.675802] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f157294c-5826-4494-b21d-8cb3f542b02b tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] [instance: 021d4755-9144-43c7-8c86-f167b7b294e4] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2155.676028] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b8c9be58-7575-4c3d-ab1d-bd9af0176057 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2155.682020] env[63379]: DEBUG oslo_vmware.api [None req-f157294c-5826-4494-b21d-8cb3f542b02b tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Waiting for the task: (returnval){ [ 2155.682020] env[63379]: value = "task-1780592" [ 2155.682020] env[63379]: _type = "Task" [ 2155.682020] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2155.689848] env[63379]: DEBUG oslo_vmware.api [None req-f157294c-5826-4494-b21d-8cb3f542b02b tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Task: {'id': task-1780592, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2156.066088] env[63379]: DEBUG nova.compute.manager [None req-536449e0-4033-48f2-b14b-78e801b99849 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] [instance: 021d4755-9144-43c7-8c86-f167b7b294e4] Instance disappeared during snapshot {{(pid=63379) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4495}} [ 2156.114634] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2156.114940] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Starting heal instance info cache {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9974}} [ 2156.175459] env[63379]: DEBUG nova.compute.manager [None req-536449e0-4033-48f2-b14b-78e801b99849 tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] [instance: 021d4755-9144-43c7-8c86-f167b7b294e4] Found 0 images (rotation: 2) {{(pid=63379) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4555}} [ 2156.192292] env[63379]: DEBUG oslo_vmware.api [None req-f157294c-5826-4494-b21d-8cb3f542b02b tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Task: {'id': task-1780592, 'name': PowerOffVM_Task, 'duration_secs': 0.125613} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2156.192573] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-f157294c-5826-4494-b21d-8cb3f542b02b tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] [instance: 021d4755-9144-43c7-8c86-f167b7b294e4] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2156.192746] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f157294c-5826-4494-b21d-8cb3f542b02b tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] [instance: 021d4755-9144-43c7-8c86-f167b7b294e4] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2156.192988] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-574cea7b-626f-43b9-8a32-b1f06259d652 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2156.219290] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f157294c-5826-4494-b21d-8cb3f542b02b tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] [instance: 021d4755-9144-43c7-8c86-f167b7b294e4] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2156.219515] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f157294c-5826-4494-b21d-8cb3f542b02b tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] [instance: 021d4755-9144-43c7-8c86-f167b7b294e4] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2156.219703] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-f157294c-5826-4494-b21d-8cb3f542b02b tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Deleting the datastore file [datastore1] 021d4755-9144-43c7-8c86-f167b7b294e4 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2156.219956] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0dc169f7-6c42-4aaf-bf0f-30e8947fbb19 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2156.226087] env[63379]: DEBUG oslo_vmware.api [None req-f157294c-5826-4494-b21d-8cb3f542b02b tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Waiting for the task: (returnval){ [ 2156.226087] env[63379]: value = "task-1780594" [ 2156.226087] env[63379]: _type = "Task" [ 2156.226087] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2156.233712] env[63379]: DEBUG oslo_vmware.api [None req-f157294c-5826-4494-b21d-8cb3f542b02b tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Task: {'id': task-1780594, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2156.617301] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Didn't find any instances for network info cache update. {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10060}} [ 2156.617525] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2156.617675] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2156.617823] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2156.617973] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2156.618134] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2156.618286] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2156.618419] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] 
CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63379) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10593}} [ 2156.618557] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager.update_available_resource {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2156.735353] env[63379]: DEBUG oslo_vmware.api [None req-f157294c-5826-4494-b21d-8cb3f542b02b tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Task: {'id': task-1780594, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.137761} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2156.735606] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-f157294c-5826-4494-b21d-8cb3f542b02b tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2156.735798] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f157294c-5826-4494-b21d-8cb3f542b02b tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] [instance: 021d4755-9144-43c7-8c86-f167b7b294e4] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2156.735973] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-f157294c-5826-4494-b21d-8cb3f542b02b tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] [instance: 021d4755-9144-43c7-8c86-f167b7b294e4] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2156.736165] env[63379]: INFO nova.compute.manager [None req-f157294c-5826-4494-b21d-8cb3f542b02b tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] [instance: 021d4755-9144-43c7-8c86-f167b7b294e4] Took 1.07 seconds to destroy the instance on the hypervisor. [ 2156.736407] env[63379]: DEBUG oslo.service.loopingcall [None req-f157294c-5826-4494-b21d-8cb3f542b02b tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2156.736601] env[63379]: DEBUG nova.compute.manager [-] [instance: 021d4755-9144-43c7-8c86-f167b7b294e4] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2156.736697] env[63379]: DEBUG nova.network.neutron [-] [instance: 021d4755-9144-43c7-8c86-f167b7b294e4] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2156.752219] env[63379]: DEBUG nova.network.neutron [-] [instance: 021d4755-9144-43c7-8c86-f167b7b294e4] Instance cache missing network info. 
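The terminate path logged above follows a fixed order: power off the VM, unregister it, delete its datastore directory, then deallocate its Neutron resources inside a retrying looping call. An outline of that ordering; session and network_api are placeholder helpers standing for the PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task and deallocate_for_instance() entries, not real APIs:

```python
def destroy_instance(session, network_api, vm_ref, datastore_dir, instance,
                     max_retries=3):
    session.power_off_vm(vm_ref)                   # PowerOffVM_Task (task-1780592)
    session.unregister_vm(vm_ref)                  # UnregisterVM (no task to poll)
    session.delete_datastore_file(datastore_dir)   # DeleteDatastoreFile_Task (task-1780594)
    # Network deallocation is retried, mirroring
    # _try_deallocate_network.._deallocate_network_with_retries in the log.
    for attempt in range(1, max_retries + 1):
        try:
            network_api.deallocate_for_instance(instance)
            return
        except Exception:
            if attempt == max_retries:
                raise
```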
{{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2157.121932] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2157.122355] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2157.122412] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2157.122591] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63379) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2157.123517] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38836341-855c-4367-994d-cf0b20710540 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2157.132517] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66bfb8df-e677-4849-8471-9c08d123b363 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2157.146016] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f1e69ed-c882-4bc3-a3f3-18737f7f0431 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2157.152851] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9126320d-d01a-4834-be5a-9903640e3489 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2157.182277] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181027MB free_disk=164GB free_vcpus=48 pci_devices=None {{(pid=63379) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2157.182424] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2157.182592] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63379) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2157.254616] env[63379]: DEBUG nova.network.neutron [-] [instance: 021d4755-9144-43c7-8c86-f167b7b294e4] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2157.757381] env[63379]: INFO nova.compute.manager [-] [instance: 021d4755-9144-43c7-8c86-f167b7b294e4] Took 1.02 seconds to deallocate network for instance. [ 2158.206285] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 021d4755-9144-43c7-8c86-f167b7b294e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2158.206550] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2158.206634] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2158.230585] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d46c49c-1d2f-44d6-8b17-a5ec330ad2cf {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.237660] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27cb1540-0435-49a3-83fe-46920307a190 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.266169] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f157294c-5826-4494-b21d-8cb3f542b02b tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2158.267013] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0fe1b45-b34d-413c-8598-8be414c66c03 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.273578] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee60a95a-6ca3-4170-91a4-2afecd7208cd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.933032] env[63379]: DEBUG nova.compute.provider_tree [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2159.435876] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed for provider 
cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2159.940712] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63379) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2159.940921] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.758s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2159.941216] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f157294c-5826-4494-b21d-8cb3f542b02b tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.675s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2159.941447] env[63379]: DEBUG nova.objects.instance [None req-f157294c-5826-4494-b21d-8cb3f542b02b tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Lazy-loading 'resources' on Instance uuid 021d4755-9144-43c7-8c86-f167b7b294e4 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2160.470991] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-059bff9a-69e0-4d19-a289-479636b596b2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.478414] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e36047f-7014-49c7-a27c-5e32c46479ee {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.507413] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0db66db-b6e4-4b83-b1c8-9b110bed69ae {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.514451] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cde66d3b-3676-4580-8a69-00a30603d44c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.526955] env[63379]: DEBUG nova.compute.provider_tree [None req-f157294c-5826-4494-b21d-8cb3f542b02b tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2161.030633] env[63379]: DEBUG nova.scheduler.client.report [None req-f157294c-5826-4494-b21d-8cb3f542b02b 
tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2161.535621] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f157294c-5826-4494-b21d-8cb3f542b02b tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.594s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2161.555618] env[63379]: INFO nova.scheduler.client.report [None req-f157294c-5826-4494-b21d-8cb3f542b02b tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Deleted allocations for instance 021d4755-9144-43c7-8c86-f167b7b294e4 [ 2162.063996] env[63379]: DEBUG oslo_concurrency.lockutils [None req-f157294c-5826-4494-b21d-8cb3f542b02b tempest-ServersAaction247Test-607684453 tempest-ServersAaction247Test-607684453-project-member] Lock "021d4755-9144-43c7-8c86-f167b7b294e4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.479s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2166.719416] env[63379]: DEBUG oslo_concurrency.lockutils [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquiring lock "e9de2a12-dd85-44ba-9066-324b3fc72d76" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2166.719675] env[63379]: DEBUG oslo_concurrency.lockutils [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lock "e9de2a12-dd85-44ba-9066-324b3fc72d76" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2167.221912] env[63379]: DEBUG nova.compute.manager [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Starting instance... 
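The resource-tracker and placement figures in the entries above are self-consistent: 512 MB reserved memory plus the one remaining 192 MB instance gives the reported used_ram=704MB, and the 48 physical vCPUs with allocation_ratio 4.0 are what placement can allocate against. A quick check of that arithmetic:

```python
# Figures taken from the resource-tracker / placement entries above.
reserved_ram_mb = 512            # MEMORY_MB 'reserved' in the inventory
instance_ram_mb = 192            # allocation of instance 021d4755-... (m1.nano)
used_ram_mb = reserved_ram_mb + instance_ram_mb
assert used_ram_mb == 704        # matches 'used_ram=704MB' in the final resource view

total_vcpus = 48
vcpu_allocation_ratio = 4.0      # from the VCPU inventory record
schedulable_vcpus = int(total_vcpus * vcpu_allocation_ratio)
assert schedulable_vcpus == 192  # total VCPU capacity placement can allocate

# The single instance also accounts for used_vcpus=1 and used_disk=1GB
# (DISK_GB: 1) in the same view.
```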
{{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2167.743623] env[63379]: DEBUG oslo_concurrency.lockutils [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2167.743931] env[63379]: DEBUG oslo_concurrency.lockutils [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2167.745410] env[63379]: INFO nova.compute.claims [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2168.780789] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b26e4104-cc95-43ce-a075-54cf510596c1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.788892] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9dd04a4-0fc2-4d56-a963-6f51fa2a22fd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.819018] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa2b1963-02cd-4996-9812-e2e35f9f03fa {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.825968] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-052148ec-4033-4b88-8902-0e03f01cd6aa {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.838592] env[63379]: DEBUG nova.compute.provider_tree [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2169.341738] env[63379]: DEBUG nova.scheduler.client.report [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2169.847098] env[63379]: DEBUG 
oslo_concurrency.lockutils [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.103s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2169.847669] env[63379]: DEBUG nova.compute.manager [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2170.352475] env[63379]: DEBUG nova.compute.utils [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2170.353860] env[63379]: DEBUG nova.compute.manager [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2170.354041] env[63379]: DEBUG nova.network.neutron [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2170.399221] env[63379]: DEBUG nova.policy [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '14ebdcc952084f9e8c91614cca982f3f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '36ebffe6565d46e48409834197213f5a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 2170.648547] env[63379]: DEBUG nova.network.neutron [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Successfully created port: ba0d9b39-04ad-4d23-bb55-cae60747bb6a {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2170.857663] env[63379]: DEBUG nova.compute.manager [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Start building block device mappings for instance. 
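"Using /dev/sd instead of None" above is the block-device-name fallback: no device name was requested, so the next free name on the /dev/sd prefix is chosen. A simplified illustration of that selection (the real helper also handles multi-letter suffixes and driver-specific prefixes):

```python
import string


def next_device_name(prefix="/dev/sd", in_use=()):
    """Return the first free single-letter device name on the given prefix.
    Simplified sketch of the behaviour behind 'Using /dev/sd instead of None'."""
    for letter in string.ascii_lowercase:
        candidate = prefix + letter
        if candidate not in in_use:
            return candidate
    raise ValueError("no free device name")


assert next_device_name() == "/dev/sda"
assert next_device_name(in_use={"/dev/sda"}) == "/dev/sdb"
```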
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2171.867089] env[63379]: DEBUG nova.compute.manager [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2171.892678] env[63379]: DEBUG nova.virt.hardware [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2171.892930] env[63379]: DEBUG nova.virt.hardware [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2171.893103] env[63379]: DEBUG nova.virt.hardware [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2171.893298] env[63379]: DEBUG nova.virt.hardware [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2171.893450] env[63379]: DEBUG nova.virt.hardware [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2171.893596] env[63379]: DEBUG nova.virt.hardware [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2171.893808] env[63379]: DEBUG nova.virt.hardware [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2171.894026] env[63379]: DEBUG nova.virt.hardware [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2171.894202] env[63379]: DEBUG nova.virt.hardware [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2171.894314] env[63379]: DEBUG nova.virt.hardware [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2171.894486] env[63379]: DEBUG nova.virt.hardware [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2171.895401] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51df6f85-0174-48c7-93a5-152800fd0cde {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.903072] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6efeda47-f184-4444-ab3f-6aac503cccd5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.005621] env[63379]: DEBUG nova.compute.manager [req-b95ccbec-0231-40fa-80bf-6b4ce28dc198 req-0f07b3fb-93f1-434b-8eee-dd628f990763 service nova] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Received event network-vif-plugged-ba0d9b39-04ad-4d23-bb55-cae60747bb6a {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2172.005856] env[63379]: DEBUG oslo_concurrency.lockutils [req-b95ccbec-0231-40fa-80bf-6b4ce28dc198 req-0f07b3fb-93f1-434b-8eee-dd628f990763 service nova] Acquiring lock "e9de2a12-dd85-44ba-9066-324b3fc72d76-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2172.006142] env[63379]: DEBUG oslo_concurrency.lockutils [req-b95ccbec-0231-40fa-80bf-6b4ce28dc198 req-0f07b3fb-93f1-434b-8eee-dd628f990763 service nova] Lock "e9de2a12-dd85-44ba-9066-324b3fc72d76-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2172.006279] env[63379]: DEBUG oslo_concurrency.lockutils [req-b95ccbec-0231-40fa-80bf-6b4ce28dc198 req-0f07b3fb-93f1-434b-8eee-dd628f990763 service nova] Lock "e9de2a12-dd85-44ba-9066-324b3fc72d76-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2172.006454] env[63379]: DEBUG nova.compute.manager [req-b95ccbec-0231-40fa-80bf-6b4ce28dc198 req-0f07b3fb-93f1-434b-8eee-dd628f990763 service nova] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] No waiting events found dispatching network-vif-plugged-ba0d9b39-04ad-4d23-bb55-cae60747bb6a {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2172.006623] env[63379]: WARNING nova.compute.manager [req-b95ccbec-0231-40fa-80bf-6b4ce28dc198 req-0f07b3fb-93f1-434b-8eee-dd628f990763 service nova] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Received unexpected event network-vif-plugged-ba0d9b39-04ad-4d23-bb55-cae60747bb6a for instance with vm_state building and task_state spawning. [ 2172.087888] env[63379]: DEBUG nova.network.neutron [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Successfully updated port: ba0d9b39-04ad-4d23-bb55-cae60747bb6a {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2172.591046] env[63379]: DEBUG oslo_concurrency.lockutils [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquiring lock "refresh_cache-e9de2a12-dd85-44ba-9066-324b3fc72d76" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2172.591046] env[63379]: DEBUG oslo_concurrency.lockutils [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquired lock "refresh_cache-e9de2a12-dd85-44ba-9066-324b3fc72d76" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2172.591257] env[63379]: DEBUG nova.network.neutron [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2173.123387] env[63379]: DEBUG nova.network.neutron [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Instance cache missing network info. 
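The nova.virt.hardware lines above reduce a 1-vCPU flavor with effectively unlimited ceilings (65536 sockets/cores/threads) to a single viable topology, 1 socket x 1 core x 1 thread. A toy enumerator that reproduces that result for this case; it is not Nova's full algorithm, which also applies preference ordering and image properties:

```python
def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    """Yield (sockets, cores, threads) triples whose product equals vcpus,
    within the given ceilings; toy version of the enumeration logged as
    'Possible topologies [...]'. Factors larger than vcpus can never appear,
    so the ranges are capped at vcpus."""
    for s in range(1, min(max_sockets, vcpus) + 1):
        for c in range(1, min(max_cores, vcpus) + 1):
            for t in range(1, min(max_threads, vcpus) + 1):
                if s * c * t == vcpus:
                    yield (s, c, t)


assert list(possible_topologies(1, 65536, 65536, 65536)) == [(1, 1, 1)]
```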
{{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2173.243972] env[63379]: DEBUG nova.network.neutron [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Updating instance_info_cache with network_info: [{"id": "ba0d9b39-04ad-4d23-bb55-cae60747bb6a", "address": "fa:16:3e:df:dd:b9", "network": {"id": "d10d49b9-7fd3-415b-8e53-f56c79be48c5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-59040310-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "36ebffe6565d46e48409834197213f5a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba0d9b39-04", "ovs_interfaceid": "ba0d9b39-04ad-4d23-bb55-cae60747bb6a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2173.746634] env[63379]: DEBUG oslo_concurrency.lockutils [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Releasing lock "refresh_cache-e9de2a12-dd85-44ba-9066-324b3fc72d76" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2173.746968] env[63379]: DEBUG nova.compute.manager [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Instance network_info: |[{"id": "ba0d9b39-04ad-4d23-bb55-cae60747bb6a", "address": "fa:16:3e:df:dd:b9", "network": {"id": "d10d49b9-7fd3-415b-8e53-f56c79be48c5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-59040310-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "36ebffe6565d46e48409834197213f5a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba0d9b39-04", "ovs_interfaceid": "ba0d9b39-04ad-4d23-bb55-cae60747bb6a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2173.747425] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:df:dd:b9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ba0d9b39-04ad-4d23-bb55-cae60747bb6a', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2173.754912] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Creating folder: Project (36ebffe6565d46e48409834197213f5a). Parent ref: group-v369214. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2173.755213] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fca17d31-b0bc-4aaf-a55f-6d2522655588 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2173.769424] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Created folder: Project (36ebffe6565d46e48409834197213f5a) in parent group-v369214. [ 2173.769600] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Creating folder: Instances. Parent ref: group-v369539. {{(pid=63379) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2173.769829] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-99187105-eb9f-4afc-b61f-13737f518ad1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2173.779193] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Created folder: Instances in parent group-v369539. [ 2173.779429] env[63379]: DEBUG oslo.service.loopingcall [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2173.779655] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2173.779863] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7633c925-8a5c-4dd9-97b9-c05041ee53c0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2173.797128] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2173.797128] env[63379]: value = "task-1780597" [ 2173.797128] env[63379]: _type = "Task" [ 2173.797128] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2173.803925] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780597, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2174.072344] env[63379]: DEBUG nova.compute.manager [req-fd8ae382-e30e-4f67-a933-2467c0f2670d req-3af0f45f-75e1-4a59-ac09-d300128a8fb6 service nova] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Received event network-changed-ba0d9b39-04ad-4d23-bb55-cae60747bb6a {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2174.072344] env[63379]: DEBUG nova.compute.manager [req-fd8ae382-e30e-4f67-a933-2467c0f2670d req-3af0f45f-75e1-4a59-ac09-d300128a8fb6 service nova] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Refreshing instance network info cache due to event network-changed-ba0d9b39-04ad-4d23-bb55-cae60747bb6a. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 2174.072344] env[63379]: DEBUG oslo_concurrency.lockutils [req-fd8ae382-e30e-4f67-a933-2467c0f2670d req-3af0f45f-75e1-4a59-ac09-d300128a8fb6 service nova] Acquiring lock "refresh_cache-e9de2a12-dd85-44ba-9066-324b3fc72d76" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2174.072344] env[63379]: DEBUG oslo_concurrency.lockutils [req-fd8ae382-e30e-4f67-a933-2467c0f2670d req-3af0f45f-75e1-4a59-ac09-d300128a8fb6 service nova] Acquired lock "refresh_cache-e9de2a12-dd85-44ba-9066-324b3fc72d76" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2174.072344] env[63379]: DEBUG nova.network.neutron [req-fd8ae382-e30e-4f67-a933-2467c0f2670d req-3af0f45f-75e1-4a59-ac09-d300128a8fb6 service nova] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Refreshing network info cache for port ba0d9b39-04ad-4d23-bb55-cae60747bb6a {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2174.307249] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780597, 'name': CreateVM_Task, 'duration_secs': 0.324709} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2174.307667] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2174.308057] env[63379]: DEBUG oslo_concurrency.lockutils [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2174.308238] env[63379]: DEBUG oslo_concurrency.lockutils [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2174.308626] env[63379]: DEBUG oslo_concurrency.lockutils [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2174.308881] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f7473071-2fa4-40d5-b383-8fbb3da44664 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.313707] env[63379]: DEBUG oslo_vmware.api [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2174.313707] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52346dd9-cfdd-9ffc-27d9-570e6f564710" [ 2174.313707] env[63379]: _type = "Task" [ 2174.313707] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2174.321282] env[63379]: DEBUG oslo_vmware.api [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52346dd9-cfdd-9ffc-27d9-570e6f564710, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2174.723209] env[63379]: DEBUG nova.network.neutron [req-fd8ae382-e30e-4f67-a933-2467c0f2670d req-3af0f45f-75e1-4a59-ac09-d300128a8fb6 service nova] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Updated VIF entry in instance network info cache for port ba0d9b39-04ad-4d23-bb55-cae60747bb6a. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2174.723603] env[63379]: DEBUG nova.network.neutron [req-fd8ae382-e30e-4f67-a933-2467c0f2670d req-3af0f45f-75e1-4a59-ac09-d300128a8fb6 service nova] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Updating instance_info_cache with network_info: [{"id": "ba0d9b39-04ad-4d23-bb55-cae60747bb6a", "address": "fa:16:3e:df:dd:b9", "network": {"id": "d10d49b9-7fd3-415b-8e53-f56c79be48c5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-59040310-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "36ebffe6565d46e48409834197213f5a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba0d9b39-04", "ovs_interfaceid": "ba0d9b39-04ad-4d23-bb55-cae60747bb6a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2174.824274] env[63379]: DEBUG oslo_vmware.api [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52346dd9-cfdd-9ffc-27d9-570e6f564710, 'name': SearchDatastore_Task, 'duration_secs': 0.008766} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2174.824538] env[63379]: DEBUG oslo_concurrency.lockutils [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2174.824823] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2174.825087] env[63379]: DEBUG oslo_concurrency.lockutils [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2174.825242] env[63379]: DEBUG oslo_concurrency.lockutils [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2174.825425] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2174.825734] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1b41d310-4c98-46ac-8709-6f676739b555 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.834021] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2174.834203] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2174.834868] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e85d96c6-728f-4794-bd25-06991d9d0d39 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.839813] env[63379]: DEBUG oslo_vmware.api [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2174.839813] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]521f474a-74d2-898a-83e4-400a01a52b03" [ 2174.839813] env[63379]: _type = "Task" [ 2174.839813] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2174.846761] env[63379]: DEBUG oslo_vmware.api [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]521f474a-74d2-898a-83e4-400a01a52b03, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2175.226443] env[63379]: DEBUG oslo_concurrency.lockutils [req-fd8ae382-e30e-4f67-a933-2467c0f2670d req-3af0f45f-75e1-4a59-ac09-d300128a8fb6 service nova] Releasing lock "refresh_cache-e9de2a12-dd85-44ba-9066-324b3fc72d76" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2175.349775] env[63379]: DEBUG oslo_vmware.api [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]521f474a-74d2-898a-83e4-400a01a52b03, 'name': SearchDatastore_Task, 'duration_secs': 0.012611} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2175.350506] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d9bf8c9-4ff7-499e-8b4d-d86dff0acd83 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.355150] env[63379]: DEBUG oslo_vmware.api [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2175.355150] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5219b244-ac40-0a35-f7b9-23e5883622e5" [ 2175.355150] env[63379]: _type = "Task" [ 2175.355150] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2175.363274] env[63379]: DEBUG oslo_vmware.api [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5219b244-ac40-0a35-f7b9-23e5883622e5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2175.865456] env[63379]: DEBUG oslo_vmware.api [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5219b244-ac40-0a35-f7b9-23e5883622e5, 'name': SearchDatastore_Task, 'duration_secs': 0.008766} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2175.865781] env[63379]: DEBUG oslo_concurrency.lockutils [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2175.866088] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] e9de2a12-dd85-44ba-9066-324b3fc72d76/e9de2a12-dd85-44ba-9066-324b3fc72d76.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2175.866391] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-825cd7c6-d928-4508-a75f-eba4931ee4b4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.872871] env[63379]: DEBUG oslo_vmware.api [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2175.872871] env[63379]: value = "task-1780598" [ 2175.872871] env[63379]: _type = "Task" [ 2175.872871] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2175.880453] env[63379]: DEBUG oslo_vmware.api [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780598, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2176.383846] env[63379]: DEBUG oslo_vmware.api [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780598, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2176.883710] env[63379]: DEBUG oslo_vmware.api [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780598, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.527129} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2176.883969] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] e9de2a12-dd85-44ba-9066-324b3fc72d76/e9de2a12-dd85-44ba-9066-324b3fc72d76.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2176.884189] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2176.884444] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9693c1dc-97a7-471b-98fb-c03b6ecb39f0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.891689] env[63379]: DEBUG oslo_vmware.api [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2176.891689] env[63379]: value = "task-1780599" [ 2176.891689] env[63379]: _type = "Task" [ 2176.891689] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2176.898877] env[63379]: DEBUG oslo_vmware.api [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780599, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2177.401494] env[63379]: DEBUG oslo_vmware.api [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780599, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066172} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2177.402751] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2177.403109] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2df7961-6fa1-48fc-9335-7358a531181a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2177.424375] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Reconfiguring VM instance instance-0000007c to attach disk [datastore1] e9de2a12-dd85-44ba-9066-324b3fc72d76/e9de2a12-dd85-44ba-9066-324b3fc72d76.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2177.424622] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-14ea4d6c-f7e9-4066-b8ce-c75b3c7be44c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2177.443369] env[63379]: DEBUG oslo_vmware.api [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2177.443369] env[63379]: value = "task-1780600" [ 2177.443369] env[63379]: _type = "Task" [ 2177.443369] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2177.450712] env[63379]: DEBUG oslo_vmware.api [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780600, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2177.952880] env[63379]: DEBUG oslo_vmware.api [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780600, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2178.453595] env[63379]: DEBUG oslo_vmware.api [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780600, 'name': ReconfigVM_Task, 'duration_secs': 0.850898} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2178.454022] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Reconfigured VM instance instance-0000007c to attach disk [datastore1] e9de2a12-dd85-44ba-9066-324b3fc72d76/e9de2a12-dd85-44ba-9066-324b3fc72d76.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2178.454524] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9433c4bb-4ea5-4295-ae8b-8ca21fc71cb7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.460544] env[63379]: DEBUG oslo_vmware.api [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2178.460544] env[63379]: value = "task-1780601" [ 2178.460544] env[63379]: _type = "Task" [ 2178.460544] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2178.467790] env[63379]: DEBUG oslo_vmware.api [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780601, 'name': Rename_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2178.970405] env[63379]: DEBUG oslo_vmware.api [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780601, 'name': Rename_Task, 'duration_secs': 0.132723} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2178.970715] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2178.970967] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0ca5bb40-2bb7-4f62-82e0-e0d8ebfd1d6e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.977625] env[63379]: DEBUG oslo_vmware.api [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2178.977625] env[63379]: value = "task-1780602" [ 2178.977625] env[63379]: _type = "Task" [ 2178.977625] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2178.984687] env[63379]: DEBUG oslo_vmware.api [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780602, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2179.487794] env[63379]: DEBUG oslo_vmware.api [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780602, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2179.988395] env[63379]: DEBUG oslo_vmware.api [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780602, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2180.490077] env[63379]: DEBUG oslo_vmware.api [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780602, 'name': PowerOnVM_Task, 'duration_secs': 1.036456} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2180.490411] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2180.490576] env[63379]: INFO nova.compute.manager [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Took 8.62 seconds to spawn the instance on the hypervisor. [ 2180.490751] env[63379]: DEBUG nova.compute.manager [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2180.491511] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53da7fa1-1c03-4da5-861b-714f6de48a6d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.010094] env[63379]: INFO nova.compute.manager [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Took 13.28 seconds to build instance. 
[ 2181.512406] env[63379]: DEBUG oslo_concurrency.lockutils [None req-75c6755c-2d39-4632-a96e-5db85e16c223 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lock "e9de2a12-dd85-44ba-9066-324b3fc72d76" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.792s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2182.104021] env[63379]: DEBUG nova.compute.manager [req-c1fec978-fc81-4890-9b93-c3bbb8e95969 req-3ca7f019-5588-4c8e-bcd0-7a0a61e3d226 service nova] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Received event network-changed-ba0d9b39-04ad-4d23-bb55-cae60747bb6a {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2182.104248] env[63379]: DEBUG nova.compute.manager [req-c1fec978-fc81-4890-9b93-c3bbb8e95969 req-3ca7f019-5588-4c8e-bcd0-7a0a61e3d226 service nova] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Refreshing instance network info cache due to event network-changed-ba0d9b39-04ad-4d23-bb55-cae60747bb6a. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 2182.104467] env[63379]: DEBUG oslo_concurrency.lockutils [req-c1fec978-fc81-4890-9b93-c3bbb8e95969 req-3ca7f019-5588-4c8e-bcd0-7a0a61e3d226 service nova] Acquiring lock "refresh_cache-e9de2a12-dd85-44ba-9066-324b3fc72d76" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2182.104615] env[63379]: DEBUG oslo_concurrency.lockutils [req-c1fec978-fc81-4890-9b93-c3bbb8e95969 req-3ca7f019-5588-4c8e-bcd0-7a0a61e3d226 service nova] Acquired lock "refresh_cache-e9de2a12-dd85-44ba-9066-324b3fc72d76" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2182.104781] env[63379]: DEBUG nova.network.neutron [req-c1fec978-fc81-4890-9b93-c3bbb8e95969 req-3ca7f019-5588-4c8e-bcd0-7a0a61e3d226 service nova] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Refreshing network info cache for port ba0d9b39-04ad-4d23-bb55-cae60747bb6a {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2182.815326] env[63379]: DEBUG nova.network.neutron [req-c1fec978-fc81-4890-9b93-c3bbb8e95969 req-3ca7f019-5588-4c8e-bcd0-7a0a61e3d226 service nova] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Updated VIF entry in instance network info cache for port ba0d9b39-04ad-4d23-bb55-cae60747bb6a. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2182.815769] env[63379]: DEBUG nova.network.neutron [req-c1fec978-fc81-4890-9b93-c3bbb8e95969 req-3ca7f019-5588-4c8e-bcd0-7a0a61e3d226 service nova] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Updating instance_info_cache with network_info: [{"id": "ba0d9b39-04ad-4d23-bb55-cae60747bb6a", "address": "fa:16:3e:df:dd:b9", "network": {"id": "d10d49b9-7fd3-415b-8e53-f56c79be48c5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-59040310-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.182", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "36ebffe6565d46e48409834197213f5a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba0d9b39-04", "ovs_interfaceid": "ba0d9b39-04ad-4d23-bb55-cae60747bb6a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2183.318997] env[63379]: DEBUG oslo_concurrency.lockutils [req-c1fec978-fc81-4890-9b93-c3bbb8e95969 req-3ca7f019-5588-4c8e-bcd0-7a0a61e3d226 service nova] Releasing lock "refresh_cache-e9de2a12-dd85-44ba-9066-324b3fc72d76" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2219.943278] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2219.943677] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2219.943677] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Starting heal instance info cache {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9974}} [ 2219.943827] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Rebuilding the list of instances to heal {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9978}} [ 2220.351561] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquiring lock "e9de2a12-dd85-44ba-9066-324b3fc72d76" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
2220.351795] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lock "e9de2a12-dd85-44ba-9066-324b3fc72d76" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2220.351988] env[63379]: INFO nova.compute.manager [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Shelving [ 2220.447059] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "refresh_cache-e9de2a12-dd85-44ba-9066-324b3fc72d76" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2220.447059] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquired lock "refresh_cache-e9de2a12-dd85-44ba-9066-324b3fc72d76" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2220.447059] env[63379]: DEBUG nova.network.neutron [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Forcefully refreshing network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2220.447356] env[63379]: DEBUG nova.objects.instance [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lazy-loading 'info_cache' on Instance uuid e9de2a12-dd85-44ba-9066-324b3fc72d76 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2220.860068] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2220.860353] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d276acf9-8cd5-41be-9cda-bab13dbdb5e6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.867177] env[63379]: DEBUG oslo_vmware.api [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2220.867177] env[63379]: value = "task-1780603" [ 2220.867177] env[63379]: _type = "Task" [ 2220.867177] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2220.874939] env[63379]: DEBUG oslo_vmware.api [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780603, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2221.377788] env[63379]: DEBUG oslo_vmware.api [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780603, 'name': PowerOffVM_Task, 'duration_secs': 0.169932} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2221.378156] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2221.378819] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2069a54c-6311-4c8e-9dcb-5df09cd8baf7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2221.396092] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7056168-3a7e-4b42-bfc4-e845a11fcd61 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2221.906405] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Creating Snapshot of the VM instance {{(pid=63379) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2221.906729] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-daf4084e-2b15-4cc5-af16-0832b11bc586 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2221.914147] env[63379]: DEBUG oslo_vmware.api [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2221.914147] env[63379]: value = "task-1780604" [ 2221.914147] env[63379]: _type = "Task" [ 2221.914147] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2221.922350] env[63379]: DEBUG oslo_vmware.api [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780604, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2222.191736] env[63379]: DEBUG nova.network.neutron [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Updating instance_info_cache with network_info: [{"id": "ba0d9b39-04ad-4d23-bb55-cae60747bb6a", "address": "fa:16:3e:df:dd:b9", "network": {"id": "d10d49b9-7fd3-415b-8e53-f56c79be48c5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-59040310-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.182", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "36ebffe6565d46e48409834197213f5a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba0d9b39-04", "ovs_interfaceid": "ba0d9b39-04ad-4d23-bb55-cae60747bb6a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2222.423913] env[63379]: DEBUG oslo_vmware.api [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780604, 'name': CreateSnapshot_Task, 'duration_secs': 0.430672} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2222.424371] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Created Snapshot of the VM instance {{(pid=63379) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2222.424948] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd70ee0b-5e4d-452d-bc4c-76c743617d44 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.694840] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Releasing lock "refresh_cache-e9de2a12-dd85-44ba-9066-324b3fc72d76" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2222.695074] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Updated the network info_cache for instance {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10045}} [ 2222.695267] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2222.695429] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2222.695603] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2222.695760] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2222.695901] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2222.696057] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2222.696190] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63379) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10593}} [ 2222.696333] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager.update_available_resource {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2222.941624] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Creating linked-clone VM from snapshot {{(pid=63379) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2222.941925] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-4218eb31-31a7-4733-9476-9c5aecee2df5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.951359] env[63379]: DEBUG oslo_vmware.api [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2222.951359] env[63379]: value = "task-1780605" [ 2222.951359] env[63379]: _type = "Task" [ 2222.951359] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2222.959118] env[63379]: DEBUG oslo_vmware.api [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780605, 'name': CloneVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2223.199953] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2223.200220] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2223.200377] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2223.200540] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63379) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2223.201549] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8abce210-f7f1-447f-9d6a-6684d40d85eb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2223.211893] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a69f18d-c347-4345-916e-f6952ea6078f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2223.225788] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d233782f-283d-42d6-bfa1-4ebe729023a1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2223.232284] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d5ffa1a-f93e-44f7-a970-21fe124d8c74 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2223.260167] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181369MB free_disk=164GB free_vcpus=48 pci_devices=None {{(pid=63379) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2223.260315] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2223.261362] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63379) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2223.461647] env[63379]: DEBUG oslo_vmware.api [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780605, 'name': CloneVM_Task} progress is 94%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2223.962427] env[63379]: DEBUG oslo_vmware.api [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780605, 'name': CloneVM_Task} progress is 95%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2224.285789] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance e9de2a12-dd85-44ba-9066-324b3fc72d76 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2224.285994] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2224.286159] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2224.310508] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-125a3545-471a-422f-ad4b-c894aa042a26 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2224.318012] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f820d972-3886-4618-9b29-d4346db5ebf6 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2224.348118] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-248af14a-f284-4bb8-8f6d-599c0cd90593 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2224.354821] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fbe1678-8d9b-4669-a05c-abbe9abcfd00 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2224.367191] env[63379]: DEBUG nova.compute.provider_tree [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2224.462854] env[63379]: DEBUG oslo_vmware.api [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780605, 'name': CloneVM_Task} progress is 95%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2224.871049] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2224.963185] env[63379]: DEBUG oslo_vmware.api [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780605, 'name': CloneVM_Task} progress is 95%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2225.375306] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63379) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2225.375546] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.115s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2225.464662] env[63379]: DEBUG oslo_vmware.api [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780605, 'name': CloneVM_Task, 'duration_secs': 2.164603} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2225.465063] env[63379]: INFO nova.virt.vmwareapi.vmops [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Created linked-clone VM from snapshot [ 2225.465646] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84b5cff7-4041-4d5b-bdf0-fd34eb8935af {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2225.472652] env[63379]: DEBUG nova.virt.vmwareapi.images [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Uploading image 16bbc987-c8db-4621-bcd3-f719a23f3aa1 {{(pid=63379) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2225.496690] env[63379]: DEBUG oslo_vmware.rw_handles [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 2225.496690] env[63379]: value = "vm-369543" [ 2225.496690] env[63379]: _type = "VirtualMachine" [ 2225.496690] env[63379]: }. {{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 2225.496953] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-67dae887-acf4-4ea0-980d-2fd632bc3356 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2225.503260] env[63379]: DEBUG oslo_vmware.rw_handles [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lease: (returnval){ [ 2225.503260] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b41e9f-5a7d-ce45-21de-9b97f006710c" [ 2225.503260] env[63379]: _type = "HttpNfcLease" [ 2225.503260] env[63379]: } obtained for exporting VM: (result){ [ 2225.503260] env[63379]: value = "vm-369543" [ 2225.503260] env[63379]: _type = "VirtualMachine" [ 2225.503260] env[63379]: }. {{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 2225.503676] env[63379]: DEBUG oslo_vmware.api [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the lease: (returnval){ [ 2225.503676] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b41e9f-5a7d-ce45-21de-9b97f006710c" [ 2225.503676] env[63379]: _type = "HttpNfcLease" [ 2225.503676] env[63379]: } to be ready. {{(pid=63379) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2225.509098] env[63379]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2225.509098] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b41e9f-5a7d-ce45-21de-9b97f006710c" [ 2225.509098] env[63379]: _type = "HttpNfcLease" [ 2225.509098] env[63379]: } is initializing. 
{{(pid=63379) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2226.011985] env[63379]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2226.011985] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b41e9f-5a7d-ce45-21de-9b97f006710c" [ 2226.011985] env[63379]: _type = "HttpNfcLease" [ 2226.011985] env[63379]: } is ready. {{(pid=63379) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2226.012329] env[63379]: DEBUG oslo_vmware.rw_handles [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2226.012329] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52b41e9f-5a7d-ce45-21de-9b97f006710c" [ 2226.012329] env[63379]: _type = "HttpNfcLease" [ 2226.012329] env[63379]: }. {{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 2226.013081] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41e2b742-2a28-47fa-9bdf-5678c7adc02d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.020587] env[63379]: DEBUG oslo_vmware.rw_handles [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52de5767-e798-e36f-7c2f-836d76eeaeb9/disk-0.vmdk from lease info. {{(pid=63379) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2226.020782] env[63379]: DEBUG oslo_vmware.rw_handles [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52de5767-e798-e36f-7c2f-836d76eeaeb9/disk-0.vmdk for reading. {{(pid=63379) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 2226.109031] env[63379]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-5fffe7f0-2224-428d-9b6b-b181c41a10c0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.926254] env[63379]: DEBUG oslo_vmware.rw_handles [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52de5767-e798-e36f-7c2f-836d76eeaeb9/disk-0.vmdk. 
{{(pid=63379) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2235.927262] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8500e5e1-2be4-41e8-ae64-ed44704709fc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.933569] env[63379]: DEBUG oslo_vmware.rw_handles [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52de5767-e798-e36f-7c2f-836d76eeaeb9/disk-0.vmdk is in state: ready. {{(pid=63379) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2235.933719] env[63379]: ERROR oslo_vmware.rw_handles [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52de5767-e798-e36f-7c2f-836d76eeaeb9/disk-0.vmdk due to incomplete transfer. [ 2235.933936] env[63379]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-80850560-8ffe-410c-a387-95cb577c3d3f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.940331] env[63379]: DEBUG oslo_vmware.rw_handles [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52de5767-e798-e36f-7c2f-836d76eeaeb9/disk-0.vmdk. {{(pid=63379) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2235.940534] env[63379]: DEBUG nova.virt.vmwareapi.images [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Uploaded image 16bbc987-c8db-4621-bcd3-f719a23f3aa1 to the Glance image server {{(pid=63379) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2235.942825] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Destroying the VM {{(pid=63379) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2235.943063] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-d22a6217-20a9-4f47-86b4-e219ad2aadbd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.948473] env[63379]: DEBUG oslo_vmware.api [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2235.948473] env[63379]: value = "task-1780607" [ 2235.948473] env[63379]: _type = "Task" [ 2235.948473] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2235.955914] env[63379]: DEBUG oslo_vmware.api [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780607, 'name': Destroy_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2236.458016] env[63379]: DEBUG oslo_vmware.api [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780607, 'name': Destroy_Task} progress is 33%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2236.959857] env[63379]: DEBUG oslo_vmware.api [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780607, 'name': Destroy_Task, 'duration_secs': 0.541622} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2236.960251] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Destroyed the VM [ 2236.960380] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Deleting Snapshot of the VM instance {{(pid=63379) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2236.960627] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-3f4897ea-a11b-42b8-be09-1dffa1b8507a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.966466] env[63379]: DEBUG oslo_vmware.api [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2236.966466] env[63379]: value = "task-1780608" [ 2236.966466] env[63379]: _type = "Task" [ 2236.966466] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2236.973399] env[63379]: DEBUG oslo_vmware.api [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780608, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2237.477062] env[63379]: DEBUG oslo_vmware.api [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780608, 'name': RemoveSnapshot_Task, 'duration_secs': 0.35436} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2237.477352] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Deleted Snapshot of the VM instance {{(pid=63379) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2237.477636] env[63379]: DEBUG nova.compute.manager [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2237.478419] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac1c8ae2-479d-4ab5-b078-f6aa0325d15f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2237.989736] env[63379]: INFO nova.compute.manager [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Shelve offloading [ 2237.991351] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2237.991625] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-da867fd4-f528-4fa1-b4fd-46f4692a2489 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2237.999078] env[63379]: DEBUG oslo_vmware.api [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2237.999078] env[63379]: value = "task-1780609" [ 2237.999078] env[63379]: _type = "Task" [ 2237.999078] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2238.006609] env[63379]: DEBUG oslo_vmware.api [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780609, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2238.510103] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] VM already powered off {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2238.510361] env[63379]: DEBUG nova.compute.manager [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2238.511081] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3dd293d-151e-4b01-8c45-b4ac7e875e38 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2238.516677] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquiring lock "refresh_cache-e9de2a12-dd85-44ba-9066-324b3fc72d76" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2238.516846] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquired lock "refresh_cache-e9de2a12-dd85-44ba-9066-324b3fc72d76" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2238.517038] env[63379]: DEBUG nova.network.neutron [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2239.230453] env[63379]: DEBUG nova.network.neutron [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Updating instance_info_cache with network_info: [{"id": "ba0d9b39-04ad-4d23-bb55-cae60747bb6a", "address": "fa:16:3e:df:dd:b9", "network": {"id": "d10d49b9-7fd3-415b-8e53-f56c79be48c5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-59040310-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.182", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "36ebffe6565d46e48409834197213f5a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tapba0d9b39-04", "ovs_interfaceid": "ba0d9b39-04ad-4d23-bb55-cae60747bb6a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2239.733832] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Releasing lock "refresh_cache-e9de2a12-dd85-44ba-9066-324b3fc72d76" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2239.968786] env[63379]: DEBUG nova.compute.manager [req-a2830389-858d-43b0-a44f-2264fdf9a305 req-a3adf193-db4f-47fc-abf2-3a3f6a33ca3f service nova] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Received event network-vif-unplugged-ba0d9b39-04ad-4d23-bb55-cae60747bb6a {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2239.969035] env[63379]: DEBUG oslo_concurrency.lockutils [req-a2830389-858d-43b0-a44f-2264fdf9a305 req-a3adf193-db4f-47fc-abf2-3a3f6a33ca3f service nova] Acquiring lock "e9de2a12-dd85-44ba-9066-324b3fc72d76-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2239.969475] env[63379]: DEBUG oslo_concurrency.lockutils [req-a2830389-858d-43b0-a44f-2264fdf9a305 req-a3adf193-db4f-47fc-abf2-3a3f6a33ca3f service nova] Lock "e9de2a12-dd85-44ba-9066-324b3fc72d76-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2239.969475] env[63379]: DEBUG oslo_concurrency.lockutils [req-a2830389-858d-43b0-a44f-2264fdf9a305 req-a3adf193-db4f-47fc-abf2-3a3f6a33ca3f service nova] Lock "e9de2a12-dd85-44ba-9066-324b3fc72d76-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2239.969621] env[63379]: DEBUG nova.compute.manager [req-a2830389-858d-43b0-a44f-2264fdf9a305 req-a3adf193-db4f-47fc-abf2-3a3f6a33ca3f service nova] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] No waiting events found dispatching network-vif-unplugged-ba0d9b39-04ad-4d23-bb55-cae60747bb6a {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2239.969787] env[63379]: WARNING nova.compute.manager [req-a2830389-858d-43b0-a44f-2264fdf9a305 req-a3adf193-db4f-47fc-abf2-3a3f6a33ca3f service nova] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Received unexpected event network-vif-unplugged-ba0d9b39-04ad-4d23-bb55-cae60747bb6a for instance with vm_state shelved and task_state shelving_offloading. 
[ 2240.061437] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2240.062406] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbfaf95d-4e76-4046-8094-9cf1ceac1891 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2240.069999] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2240.070249] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a90d0aab-3256-4297-8202-6a6987a83282 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2240.204259] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2240.204504] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2240.204699] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Deleting the datastore file [datastore1] e9de2a12-dd85-44ba-9066-324b3fc72d76 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2240.204981] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2f2a774a-5024-40cc-81e5-c69ec4dfa6e9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2240.211041] env[63379]: DEBUG oslo_vmware.api [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2240.211041] env[63379]: value = "task-1780611" [ 2240.211041] env[63379]: _type = "Task" [ 2240.211041] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2240.218270] env[63379]: DEBUG oslo_vmware.api [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780611, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2240.722227] env[63379]: DEBUG oslo_vmware.api [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780611, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.187816} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2240.722518] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2240.722678] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2240.722888] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2240.747021] env[63379]: INFO nova.scheduler.client.report [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Deleted allocations for instance e9de2a12-dd85-44ba-9066-324b3fc72d76 [ 2241.252228] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2241.252493] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2241.252723] env[63379]: DEBUG nova.objects.instance [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lazy-loading 'resources' on Instance uuid e9de2a12-dd85-44ba-9066-324b3fc72d76 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2241.755688] env[63379]: DEBUG nova.objects.instance [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lazy-loading 'numa_topology' on Instance uuid e9de2a12-dd85-44ba-9066-324b3fc72d76 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2241.993453] env[63379]: DEBUG nova.compute.manager [req-f63b329a-d4bc-49cf-a98b-6a7e67fbbc0b 
req-b11d9fda-7255-4d23-9032-87f04878fabf service nova] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Received event network-changed-ba0d9b39-04ad-4d23-bb55-cae60747bb6a {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2241.993732] env[63379]: DEBUG nova.compute.manager [req-f63b329a-d4bc-49cf-a98b-6a7e67fbbc0b req-b11d9fda-7255-4d23-9032-87f04878fabf service nova] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Refreshing instance network info cache due to event network-changed-ba0d9b39-04ad-4d23-bb55-cae60747bb6a. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 2241.994290] env[63379]: DEBUG oslo_concurrency.lockutils [req-f63b329a-d4bc-49cf-a98b-6a7e67fbbc0b req-b11d9fda-7255-4d23-9032-87f04878fabf service nova] Acquiring lock "refresh_cache-e9de2a12-dd85-44ba-9066-324b3fc72d76" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2241.994453] env[63379]: DEBUG oslo_concurrency.lockutils [req-f63b329a-d4bc-49cf-a98b-6a7e67fbbc0b req-b11d9fda-7255-4d23-9032-87f04878fabf service nova] Acquired lock "refresh_cache-e9de2a12-dd85-44ba-9066-324b3fc72d76" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2241.994626] env[63379]: DEBUG nova.network.neutron [req-f63b329a-d4bc-49cf-a98b-6a7e67fbbc0b req-b11d9fda-7255-4d23-9032-87f04878fabf service nova] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Refreshing network info cache for port ba0d9b39-04ad-4d23-bb55-cae60747bb6a {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2242.258138] env[63379]: DEBUG nova.objects.base [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=63379) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2242.285131] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46e6f966-34d1-45ab-9bba-1b022c5a4e1f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2242.292902] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca2f0aff-0519-4aa5-897b-fbf5818cf19c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2242.321784] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-affce0db-08d3-4845-aff9-075a63fc3394 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2242.328735] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57ce7b35-53a9-4b22-b420-35f93723528f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2242.342241] env[63379]: DEBUG nova.compute.provider_tree [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2242.714320] env[63379]: DEBUG nova.network.neutron 
[req-f63b329a-d4bc-49cf-a98b-6a7e67fbbc0b req-b11d9fda-7255-4d23-9032-87f04878fabf service nova] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Updated VIF entry in instance network info cache for port ba0d9b39-04ad-4d23-bb55-cae60747bb6a. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2242.714774] env[63379]: DEBUG nova.network.neutron [req-f63b329a-d4bc-49cf-a98b-6a7e67fbbc0b req-b11d9fda-7255-4d23-9032-87f04878fabf service nova] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Updating instance_info_cache with network_info: [{"id": "ba0d9b39-04ad-4d23-bb55-cae60747bb6a", "address": "fa:16:3e:df:dd:b9", "network": {"id": "d10d49b9-7fd3-415b-8e53-f56c79be48c5", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-59040310-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.182", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "36ebffe6565d46e48409834197213f5a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapba0d9b39-04", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2242.771427] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquiring lock "e9de2a12-dd85-44ba-9066-324b3fc72d76" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2242.845622] env[63379]: DEBUG nova.scheduler.client.report [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2243.217772] env[63379]: DEBUG oslo_concurrency.lockutils [req-f63b329a-d4bc-49cf-a98b-6a7e67fbbc0b req-b11d9fda-7255-4d23-9032-87f04878fabf service nova] Releasing lock "refresh_cache-e9de2a12-dd85-44ba-9066-324b3fc72d76" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2243.351406] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: 
held 2.099s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2243.859463] env[63379]: DEBUG oslo_concurrency.lockutils [None req-4a90c862-41b9-4395-a141-d94fe9c05846 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lock "e9de2a12-dd85-44ba-9066-324b3fc72d76" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 23.508s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2243.860357] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lock "e9de2a12-dd85-44ba-9066-324b3fc72d76" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 1.089s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2243.860546] env[63379]: INFO nova.compute.manager [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Unshelving [ 2244.870397] env[63379]: DEBUG nova.compute.utils [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2245.374057] env[63379]: INFO nova.virt.block_device [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Booting with volume 918237ff-9426-42e7-9fab-daa95470f7e3 at /dev/sdb [ 2245.407743] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d8366318-d50c-45c6-a4ff-9ec85231d2e4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.416819] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a9181be-3e66-4394-ab18-388554a547b8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.440072] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-db7e88c4-37c4-4ca0-b318-d89c8c66aea2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.447559] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fdb8b1c-78a9-4a6e-a0ca-7f590a426cc0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.470912] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ebdc84c-ed27-4226-8738-d520bd9d35e2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.476692] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b2e1e68-29b5-46c7-b84c-26e2b2291bd8 {{(pid=63379) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.489327] env[63379]: DEBUG nova.virt.block_device [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Updating existing volume attachment record: 606c6c5b-1a84-439d-b2d4-bb275d7c0942 {{(pid=63379) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2251.081414] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2251.081804] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2251.081943] env[63379]: DEBUG nova.objects.instance [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lazy-loading 'pci_requests' on Instance uuid e9de2a12-dd85-44ba-9066-324b3fc72d76 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2251.586052] env[63379]: DEBUG nova.objects.instance [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lazy-loading 'numa_topology' on Instance uuid e9de2a12-dd85-44ba-9066-324b3fc72d76 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2252.088632] env[63379]: INFO nova.compute.claims [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2253.123786] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1548c60a-1332-417f-a9bf-1877bc8aeeff {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2253.131484] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a481f67-c420-4dad-8024-ea6b0a96e886 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2253.161316] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15a0a36b-5aae-43da-8cb7-5a05ae1d3aaa {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2253.168480] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef298c3c-6b09-4de8-9650-3676f90e6afc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2253.181695] env[63379]: DEBUG 
nova.compute.provider_tree [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2253.685310] env[63379]: DEBUG nova.scheduler.client.report [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2254.190736] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.109s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2254.219157] env[63379]: INFO nova.network.neutron [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Updating port ba0d9b39-04ad-4d23-bb55-cae60747bb6a with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 2255.592277] env[63379]: DEBUG nova.compute.manager [req-66a5dafe-2bd1-449d-82db-0c2e9f71d6f2 req-e8f329b0-9a6b-4e95-8108-1c1806889f73 service nova] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Received event network-vif-plugged-ba0d9b39-04ad-4d23-bb55-cae60747bb6a {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2255.592630] env[63379]: DEBUG oslo_concurrency.lockutils [req-66a5dafe-2bd1-449d-82db-0c2e9f71d6f2 req-e8f329b0-9a6b-4e95-8108-1c1806889f73 service nova] Acquiring lock "e9de2a12-dd85-44ba-9066-324b3fc72d76-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2255.592967] env[63379]: DEBUG oslo_concurrency.lockutils [req-66a5dafe-2bd1-449d-82db-0c2e9f71d6f2 req-e8f329b0-9a6b-4e95-8108-1c1806889f73 service nova] Lock "e9de2a12-dd85-44ba-9066-324b3fc72d76-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2255.593216] env[63379]: DEBUG oslo_concurrency.lockutils [req-66a5dafe-2bd1-449d-82db-0c2e9f71d6f2 req-e8f329b0-9a6b-4e95-8108-1c1806889f73 service nova] Lock "e9de2a12-dd85-44ba-9066-324b3fc72d76-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2255.593468] env[63379]: DEBUG 
nova.compute.manager [req-66a5dafe-2bd1-449d-82db-0c2e9f71d6f2 req-e8f329b0-9a6b-4e95-8108-1c1806889f73 service nova] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] No waiting events found dispatching network-vif-plugged-ba0d9b39-04ad-4d23-bb55-cae60747bb6a {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2255.593688] env[63379]: WARNING nova.compute.manager [req-66a5dafe-2bd1-449d-82db-0c2e9f71d6f2 req-e8f329b0-9a6b-4e95-8108-1c1806889f73 service nova] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Received unexpected event network-vif-plugged-ba0d9b39-04ad-4d23-bb55-cae60747bb6a for instance with vm_state shelved_offloaded and task_state spawning. [ 2255.678639] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquiring lock "refresh_cache-e9de2a12-dd85-44ba-9066-324b3fc72d76" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2255.678860] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquired lock "refresh_cache-e9de2a12-dd85-44ba-9066-324b3fc72d76" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2255.679090] env[63379]: DEBUG nova.network.neutron [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2256.374089] env[63379]: DEBUG nova.network.neutron [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Updating instance_info_cache with network_info: [{"id": "ba0d9b39-04ad-4d23-bb55-cae60747bb6a", "address": "fa:16:3e:df:dd:b9", "network": {"id": "d10d49b9-7fd3-415b-8e53-f56c79be48c5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-59040310-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.182", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "36ebffe6565d46e48409834197213f5a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba0d9b39-04", "ovs_interfaceid": "ba0d9b39-04ad-4d23-bb55-cae60747bb6a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2256.876729] env[63379]: DEBUG 
oslo_concurrency.lockutils [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Releasing lock "refresh_cache-e9de2a12-dd85-44ba-9066-324b3fc72d76" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2256.902791] env[63379]: DEBUG nova.virt.hardware [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='d80cb84bf6a744a1a83d4499ef26cf10',container_format='bare',created_at=2024-12-11T23:42:40Z,direct_url=,disk_format='vmdk',id=16bbc987-c8db-4621-bcd3-f719a23f3aa1,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1476100283-shelved',owner='36ebffe6565d46e48409834197213f5a',properties=ImageMetaProps,protected=,size=31667200,status='active',tags=,updated_at=2024-12-11T23:42:56Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2256.903059] env[63379]: DEBUG nova.virt.hardware [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2256.903225] env[63379]: DEBUG nova.virt.hardware [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2256.903418] env[63379]: DEBUG nova.virt.hardware [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2256.903569] env[63379]: DEBUG nova.virt.hardware [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2256.903721] env[63379]: DEBUG nova.virt.hardware [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2256.903932] env[63379]: DEBUG nova.virt.hardware [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2256.904135] env[63379]: DEBUG 
nova.virt.hardware [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2256.904336] env[63379]: DEBUG nova.virt.hardware [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2256.904505] env[63379]: DEBUG nova.virt.hardware [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2256.904682] env[63379]: DEBUG nova.virt.hardware [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2256.905533] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-519e6cf9-8f54-4a7f-9b88-f3e7769fc531 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2256.913215] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81f5441a-fa1c-46bb-8481-dfc4ad4ca956 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2256.925979] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:df:dd:b9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ba0d9b39-04ad-4d23-bb55-cae60747bb6a', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2256.933251] env[63379]: DEBUG oslo.service.loopingcall [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2256.933466] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2256.933657] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2de596b8-1ed3-4b4c-96cf-2f21c254716e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2256.951631] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2256.951631] env[63379]: value = "task-1780616" [ 2256.951631] env[63379]: _type = "Task" [ 2256.951631] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2256.958654] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780616, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2257.463308] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780616, 'name': CreateVM_Task, 'duration_secs': 0.319942} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2257.463532] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2257.464085] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/16bbc987-c8db-4621-bcd3-f719a23f3aa1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2257.464283] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquired lock "[datastore1] devstack-image-cache_base/16bbc987-c8db-4621-bcd3-f719a23f3aa1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2257.464697] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/16bbc987-c8db-4621-bcd3-f719a23f3aa1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2257.464945] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-edefea60-85f5-42d0-850c-37e827650313 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2257.469385] env[63379]: DEBUG oslo_vmware.api [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2257.469385] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5291979e-7b25-12db-4260-3ce97ade91e7" [ 2257.469385] env[63379]: _type = "Task" [ 2257.469385] env[63379]: } to 
complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2257.476412] env[63379]: DEBUG oslo_vmware.api [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5291979e-7b25-12db-4260-3ce97ade91e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2257.618561] env[63379]: DEBUG nova.compute.manager [req-c995bfaa-65d8-4b77-945a-93c3e4064fda req-310ac8f9-9ce5-4634-b0af-048dbb8ff6f1 service nova] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Received event network-changed-ba0d9b39-04ad-4d23-bb55-cae60747bb6a {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2257.618717] env[63379]: DEBUG nova.compute.manager [req-c995bfaa-65d8-4b77-945a-93c3e4064fda req-310ac8f9-9ce5-4634-b0af-048dbb8ff6f1 service nova] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Refreshing instance network info cache due to event network-changed-ba0d9b39-04ad-4d23-bb55-cae60747bb6a. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 2257.618939] env[63379]: DEBUG oslo_concurrency.lockutils [req-c995bfaa-65d8-4b77-945a-93c3e4064fda req-310ac8f9-9ce5-4634-b0af-048dbb8ff6f1 service nova] Acquiring lock "refresh_cache-e9de2a12-dd85-44ba-9066-324b3fc72d76" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2257.619101] env[63379]: DEBUG oslo_concurrency.lockutils [req-c995bfaa-65d8-4b77-945a-93c3e4064fda req-310ac8f9-9ce5-4634-b0af-048dbb8ff6f1 service nova] Acquired lock "refresh_cache-e9de2a12-dd85-44ba-9066-324b3fc72d76" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2257.619268] env[63379]: DEBUG nova.network.neutron [req-c995bfaa-65d8-4b77-945a-93c3e4064fda req-310ac8f9-9ce5-4634-b0af-048dbb8ff6f1 service nova] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Refreshing network info cache for port ba0d9b39-04ad-4d23-bb55-cae60747bb6a {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2257.980177] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Releasing lock "[datastore1] devstack-image-cache_base/16bbc987-c8db-4621-bcd3-f719a23f3aa1" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2257.980564] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Processing image 16bbc987-c8db-4621-bcd3-f719a23f3aa1 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2257.980700] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/16bbc987-c8db-4621-bcd3-f719a23f3aa1/16bbc987-c8db-4621-bcd3-f719a23f3aa1.vmdk" {{(pid=63379) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2257.980830] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquired lock "[datastore1] devstack-image-cache_base/16bbc987-c8db-4621-bcd3-f719a23f3aa1/16bbc987-c8db-4621-bcd3-f719a23f3aa1.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2257.981026] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2257.981285] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-804bfcf0-061b-4162-adf5-43e68b2e789a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2257.989376] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2257.989555] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2257.990212] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-449c73d9-32f1-4c7d-853b-d00721a92ad3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2257.994695] env[63379]: DEBUG oslo_vmware.api [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2257.994695] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e16d50-5d94-5ffa-c061-2dcd8a8086ce" [ 2257.994695] env[63379]: _type = "Task" [ 2257.994695] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2258.001818] env[63379]: DEBUG oslo_vmware.api [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e16d50-5d94-5ffa-c061-2dcd8a8086ce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2258.320498] env[63379]: DEBUG nova.network.neutron [req-c995bfaa-65d8-4b77-945a-93c3e4064fda req-310ac8f9-9ce5-4634-b0af-048dbb8ff6f1 service nova] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Updated VIF entry in instance network info cache for port ba0d9b39-04ad-4d23-bb55-cae60747bb6a. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2258.320872] env[63379]: DEBUG nova.network.neutron [req-c995bfaa-65d8-4b77-945a-93c3e4064fda req-310ac8f9-9ce5-4634-b0af-048dbb8ff6f1 service nova] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Updating instance_info_cache with network_info: [{"id": "ba0d9b39-04ad-4d23-bb55-cae60747bb6a", "address": "fa:16:3e:df:dd:b9", "network": {"id": "d10d49b9-7fd3-415b-8e53-f56c79be48c5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-59040310-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.182", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "36ebffe6565d46e48409834197213f5a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba0d9b39-04", "ovs_interfaceid": "ba0d9b39-04ad-4d23-bb55-cae60747bb6a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2258.504265] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Preparing fetch location {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2258.504512] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Fetch image to [datastore1] OSTACK_IMG_c77abc8f-1cd2-448f-97ca-fa3abb26d76e/OSTACK_IMG_c77abc8f-1cd2-448f-97ca-fa3abb26d76e.vmdk {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2258.504701] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Downloading stream optimized image 16bbc987-c8db-4621-bcd3-f719a23f3aa1 to [datastore1] OSTACK_IMG_c77abc8f-1cd2-448f-97ca-fa3abb26d76e/OSTACK_IMG_c77abc8f-1cd2-448f-97ca-fa3abb26d76e.vmdk on the data store datastore1 as vApp {{(pid=63379) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 2258.504875] env[63379]: DEBUG nova.virt.vmwareapi.images [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Downloading image file data 16bbc987-c8db-4621-bcd3-f719a23f3aa1 to the ESX as VM named 'OSTACK_IMG_c77abc8f-1cd2-448f-97ca-fa3abb26d76e' {{(pid=63379) 
fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 2258.568032] env[63379]: DEBUG oslo_vmware.rw_handles [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 2258.568032] env[63379]: value = "resgroup-9" [ 2258.568032] env[63379]: _type = "ResourcePool" [ 2258.568032] env[63379]: }. {{(pid=63379) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 2258.568354] env[63379]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-d8fab8c3-5089-4671-8053-4a0c4d13bed7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2258.588763] env[63379]: DEBUG oslo_vmware.rw_handles [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lease: (returnval){ [ 2258.588763] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a80ca6-bc39-f7ce-4c4b-3e61e786819a" [ 2258.588763] env[63379]: _type = "HttpNfcLease" [ 2258.588763] env[63379]: } obtained for vApp import into resource pool (val){ [ 2258.588763] env[63379]: value = "resgroup-9" [ 2258.588763] env[63379]: _type = "ResourcePool" [ 2258.588763] env[63379]: }. {{(pid=63379) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 2258.589129] env[63379]: DEBUG oslo_vmware.api [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the lease: (returnval){ [ 2258.589129] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a80ca6-bc39-f7ce-4c4b-3e61e786819a" [ 2258.589129] env[63379]: _type = "HttpNfcLease" [ 2258.589129] env[63379]: } to be ready. {{(pid=63379) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2258.595071] env[63379]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2258.595071] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a80ca6-bc39-f7ce-4c4b-3e61e786819a" [ 2258.595071] env[63379]: _type = "HttpNfcLease" [ 2258.595071] env[63379]: } is initializing. {{(pid=63379) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2258.823765] env[63379]: DEBUG oslo_concurrency.lockutils [req-c995bfaa-65d8-4b77-945a-93c3e4064fda req-310ac8f9-9ce5-4634-b0af-048dbb8ff6f1 service nova] Releasing lock "refresh_cache-e9de2a12-dd85-44ba-9066-324b3fc72d76" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2259.096938] env[63379]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2259.096938] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a80ca6-bc39-f7ce-4c4b-3e61e786819a" [ 2259.096938] env[63379]: _type = "HttpNfcLease" [ 2259.096938] env[63379]: } is initializing. {{(pid=63379) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2259.597837] env[63379]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2259.597837] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a80ca6-bc39-f7ce-4c4b-3e61e786819a" [ 2259.597837] env[63379]: _type = "HttpNfcLease" [ 2259.597837] env[63379]: } is ready. 
{{(pid=63379) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2259.598245] env[63379]: DEBUG oslo_vmware.rw_handles [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2259.598245] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52a80ca6-bc39-f7ce-4c4b-3e61e786819a" [ 2259.598245] env[63379]: _type = "HttpNfcLease" [ 2259.598245] env[63379]: }. {{(pid=63379) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 2259.598880] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2d80672-2937-4233-b5bc-91337d6b3aba {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.605778] env[63379]: DEBUG oslo_vmware.rw_handles [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52adde6d-ca15-d81b-b456-54489441ea4b/disk-0.vmdk from lease info. {{(pid=63379) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2259.605954] env[63379]: DEBUG oslo_vmware.rw_handles [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Creating HTTP connection to write to file with size = 31667200 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52adde6d-ca15-d81b-b456-54489441ea4b/disk-0.vmdk. {{(pid=63379) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2259.671103] env[63379]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-bc4d6f9b-43fe-4fa5-a182-da80a64d17a1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.779554] env[63379]: DEBUG oslo_vmware.rw_handles [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Completed reading data from the image iterator. {{(pid=63379) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2260.779939] env[63379]: DEBUG oslo_vmware.rw_handles [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52adde6d-ca15-d81b-b456-54489441ea4b/disk-0.vmdk. 
{{(pid=63379) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2260.781071] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8011cc09-30e8-412b-a8aa-4f61d5b1da82 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.788748] env[63379]: DEBUG oslo_vmware.rw_handles [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52adde6d-ca15-d81b-b456-54489441ea4b/disk-0.vmdk is in state: ready. {{(pid=63379) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2260.788989] env[63379]: DEBUG oslo_vmware.rw_handles [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52adde6d-ca15-d81b-b456-54489441ea4b/disk-0.vmdk. {{(pid=63379) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 2260.789299] env[63379]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-841919a8-0f17-4144-87cc-43ce3b126f9e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.962821] env[63379]: DEBUG oslo_vmware.rw_handles [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52adde6d-ca15-d81b-b456-54489441ea4b/disk-0.vmdk. 
{{(pid=63379) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 2260.963104] env[63379]: INFO nova.virt.vmwareapi.images [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Downloaded image file data 16bbc987-c8db-4621-bcd3-f719a23f3aa1 [ 2260.963899] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-434b2d68-b9bd-4ee5-a103-ef9dbf28f16c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.980715] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ed8cd176-3754-43bc-beaa-4014d2c3205a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2261.010380] env[63379]: INFO nova.virt.vmwareapi.images [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] The imported VM was unregistered [ 2261.012725] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Caching image {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2261.012988] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Creating directory with path [datastore1] devstack-image-cache_base/16bbc987-c8db-4621-bcd3-f719a23f3aa1 {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2261.013274] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2f3636b5-f85b-4ba0-a82c-117bf5dec4cd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2261.025321] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Created directory with path [datastore1] devstack-image-cache_base/16bbc987-c8db-4621-bcd3-f719a23f3aa1 {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2261.025488] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_c77abc8f-1cd2-448f-97ca-fa3abb26d76e/OSTACK_IMG_c77abc8f-1cd2-448f-97ca-fa3abb26d76e.vmdk to [datastore1] devstack-image-cache_base/16bbc987-c8db-4621-bcd3-f719a23f3aa1/16bbc987-c8db-4621-bcd3-f719a23f3aa1.vmdk. 
{{(pid=63379) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 2261.025729] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-fbf8c638-9d78-4d6e-83bd-066414be6a73 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2261.031496] env[63379]: DEBUG oslo_vmware.api [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2261.031496] env[63379]: value = "task-1780619" [ 2261.031496] env[63379]: _type = "Task" [ 2261.031496] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2261.038792] env[63379]: DEBUG oslo_vmware.api [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780619, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2261.542202] env[63379]: DEBUG oslo_vmware.api [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780619, 'name': MoveVirtualDisk_Task} progress is 24%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2262.042774] env[63379]: DEBUG oslo_vmware.api [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780619, 'name': MoveVirtualDisk_Task} progress is 46%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2262.544223] env[63379]: DEBUG oslo_vmware.api [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780619, 'name': MoveVirtualDisk_Task} progress is 69%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2263.045488] env[63379]: DEBUG oslo_vmware.api [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780619, 'name': MoveVirtualDisk_Task} progress is 91%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2263.545222] env[63379]: DEBUG oslo_vmware.api [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780619, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.197297} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2263.545502] env[63379]: INFO nova.virt.vmwareapi.ds_util [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_c77abc8f-1cd2-448f-97ca-fa3abb26d76e/OSTACK_IMG_c77abc8f-1cd2-448f-97ca-fa3abb26d76e.vmdk to [datastore1] devstack-image-cache_base/16bbc987-c8db-4621-bcd3-f719a23f3aa1/16bbc987-c8db-4621-bcd3-f719a23f3aa1.vmdk. [ 2263.545695] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Cleaning up location [datastore1] OSTACK_IMG_c77abc8f-1cd2-448f-97ca-fa3abb26d76e {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 2263.545862] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_c77abc8f-1cd2-448f-97ca-fa3abb26d76e {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2263.546133] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-34bfa145-ca66-434c-88e1-200dc3ea0934 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2263.552736] env[63379]: DEBUG oslo_vmware.api [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2263.552736] env[63379]: value = "task-1780620" [ 2263.552736] env[63379]: _type = "Task" [ 2263.552736] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2263.560249] env[63379]: DEBUG oslo_vmware.api [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780620, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2264.062836] env[63379]: DEBUG oslo_vmware.api [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780620, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.032063} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2264.063274] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2264.063274] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Releasing lock "[datastore1] devstack-image-cache_base/16bbc987-c8db-4621-bcd3-f719a23f3aa1/16bbc987-c8db-4621-bcd3-f719a23f3aa1.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2264.063511] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/16bbc987-c8db-4621-bcd3-f719a23f3aa1/16bbc987-c8db-4621-bcd3-f719a23f3aa1.vmdk to [datastore1] e9de2a12-dd85-44ba-9066-324b3fc72d76/e9de2a12-dd85-44ba-9066-324b3fc72d76.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2264.063764] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c86bf8c7-46f8-4892-8006-4882116790ab {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.069804] env[63379]: DEBUG oslo_vmware.api [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2264.069804] env[63379]: value = "task-1780621" [ 2264.069804] env[63379]: _type = "Task" [ 2264.069804] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2264.076843] env[63379]: DEBUG oslo_vmware.api [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780621, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2264.580597] env[63379]: DEBUG oslo_vmware.api [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780621, 'name': CopyVirtualDisk_Task} progress is 24%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2265.080414] env[63379]: DEBUG oslo_vmware.api [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780621, 'name': CopyVirtualDisk_Task} progress is 43%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2265.580789] env[63379]: DEBUG oslo_vmware.api [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780621, 'name': CopyVirtualDisk_Task} progress is 66%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2266.081762] env[63379]: DEBUG oslo_vmware.api [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780621, 'name': CopyVirtualDisk_Task} progress is 88%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2266.582775] env[63379]: DEBUG oslo_vmware.api [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780621, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.36277} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2266.583121] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/16bbc987-c8db-4621-bcd3-f719a23f3aa1/16bbc987-c8db-4621-bcd3-f719a23f3aa1.vmdk to [datastore1] e9de2a12-dd85-44ba-9066-324b3fc72d76/e9de2a12-dd85-44ba-9066-324b3fc72d76.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2266.583959] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0328d995-3ec1-4cd8-ade1-111a3de57a17 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2266.604972] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Reconfiguring VM instance instance-0000007c to attach disk [datastore1] e9de2a12-dd85-44ba-9066-324b3fc72d76/e9de2a12-dd85-44ba-9066-324b3fc72d76.vmdk or device None with type streamOptimized {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2266.605223] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-912afa98-7d32-4f1e-8da6-9fcd8e919256 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2266.624246] env[63379]: DEBUG oslo_vmware.api [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2266.624246] env[63379]: value = "task-1780622" [ 2266.624246] env[63379]: _type = "Task" [ 2266.624246] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2266.631283] env[63379]: DEBUG oslo_vmware.api [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780622, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2267.134039] env[63379]: DEBUG oslo_vmware.api [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780622, 'name': ReconfigVM_Task, 'duration_secs': 0.263821} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2267.134383] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Reconfigured VM instance instance-0000007c to attach disk [datastore1] e9de2a12-dd85-44ba-9066-324b3fc72d76/e9de2a12-dd85-44ba-9066-324b3fc72d76.vmdk or device None with type streamOptimized {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2267.135422] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'encryption_format': None, 'guest_format': None, 'device_type': 'disk', 'encryption_secret_uuid': None, 'boot_index': 0, 'encrypted': False, 'size': 0, 'device_name': '/dev/sda', 'disk_bus': None, 'encryption_options': None, 'image_id': 'd3d2d67c-c3e3-4e1e-9156-0c896c5b3d48'}], 'ephemerals': [], 'block_device_mapping': [{'guest_format': None, 'device_type': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369545', 'volume_id': '918237ff-9426-42e7-9fab-daa95470f7e3', 'name': 'volume-918237ff-9426-42e7-9fab-daa95470f7e3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': 'e9de2a12-dd85-44ba-9066-324b3fc72d76', 'attached_at': '', 'detached_at': '', 'volume_id': '918237ff-9426-42e7-9fab-daa95470f7e3', 'serial': '918237ff-9426-42e7-9fab-daa95470f7e3'}, 'attachment_id': '606c6c5b-1a84-439d-b2d4-bb275d7c0942', 'boot_index': None, 'mount_device': '/dev/sdb', 'disk_bus': None, 'delete_on_termination': False, 'volume_type': None}], 'swap': None} {{(pid=63379) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 2267.135629] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Volume attach. 
Driver type: vmdk {{(pid=63379) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2267.135821] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369545', 'volume_id': '918237ff-9426-42e7-9fab-daa95470f7e3', 'name': 'volume-918237ff-9426-42e7-9fab-daa95470f7e3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': 'e9de2a12-dd85-44ba-9066-324b3fc72d76', 'attached_at': '', 'detached_at': '', 'volume_id': '918237ff-9426-42e7-9fab-daa95470f7e3', 'serial': '918237ff-9426-42e7-9fab-daa95470f7e3'} {{(pid=63379) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2267.136586] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a806b91b-eb52-4a2e-b99d-0e6e29e5f30c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.151326] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a86d1e0-e435-4b23-889d-84c36ce42ece {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.175306] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Reconfiguring VM instance instance-0000007c to attach disk [datastore1] volume-918237ff-9426-42e7-9fab-daa95470f7e3/volume-918237ff-9426-42e7-9fab-daa95470f7e3.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2267.175595] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0d2453c3-a660-4cae-889f-cbaeda850b14 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.193324] env[63379]: DEBUG oslo_vmware.api [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2267.193324] env[63379]: value = "task-1780623" [ 2267.193324] env[63379]: _type = "Task" [ 2267.193324] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2267.200366] env[63379]: DEBUG oslo_vmware.api [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780623, 'name': ReconfigVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2267.703021] env[63379]: DEBUG oslo_vmware.api [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780623, 'name': ReconfigVM_Task, 'duration_secs': 0.318422} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2267.703339] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Reconfigured VM instance instance-0000007c to attach disk [datastore1] volume-918237ff-9426-42e7-9fab-daa95470f7e3/volume-918237ff-9426-42e7-9fab-daa95470f7e3.vmdk or device None with type thin {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2267.708073] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6732fc74-6348-4ee9-a974-d04cf0ee4f81 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.722056] env[63379]: DEBUG oslo_vmware.api [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2267.722056] env[63379]: value = "task-1780624" [ 2267.722056] env[63379]: _type = "Task" [ 2267.722056] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2267.729311] env[63379]: DEBUG oslo_vmware.api [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780624, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2268.231343] env[63379]: DEBUG oslo_vmware.api [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780624, 'name': ReconfigVM_Task, 'duration_secs': 0.134324} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2268.231775] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369545', 'volume_id': '918237ff-9426-42e7-9fab-daa95470f7e3', 'name': 'volume-918237ff-9426-42e7-9fab-daa95470f7e3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': 'e9de2a12-dd85-44ba-9066-324b3fc72d76', 'attached_at': '', 'detached_at': '', 'volume_id': '918237ff-9426-42e7-9fab-daa95470f7e3', 'serial': '918237ff-9426-42e7-9fab-daa95470f7e3'} {{(pid=63379) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2268.232340] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1631cfc7-2aff-43ad-a4f9-d00f7b9a421c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2268.238668] env[63379]: DEBUG oslo_vmware.api [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2268.238668] env[63379]: value = "task-1780625" [ 2268.238668] env[63379]: _type = "Task" [ 2268.238668] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2268.246207] env[63379]: DEBUG oslo_vmware.api [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780625, 'name': Rename_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2268.748688] env[63379]: DEBUG oslo_vmware.api [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780625, 'name': Rename_Task, 'duration_secs': 0.156026} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2268.748988] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2268.749241] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-043ad70c-29ec-4606-9779-9aa8086a5482 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2268.755126] env[63379]: DEBUG oslo_vmware.api [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2268.755126] env[63379]: value = "task-1780626" [ 2268.755126] env[63379]: _type = "Task" [ 2268.755126] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2268.762126] env[63379]: DEBUG oslo_vmware.api [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780626, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2269.265677] env[63379]: DEBUG oslo_vmware.api [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780626, 'name': PowerOnVM_Task, 'duration_secs': 0.450154} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2269.266138] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2269.389335] env[63379]: DEBUG nova.compute.manager [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2269.390371] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4b6610d-96ac-44b2-ae75-a62fce554345 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2269.907287] env[63379]: DEBUG oslo_concurrency.lockutils [None req-c3b1d2f4-727d-4e89-9ba0-15257ddd5b02 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lock "e9de2a12-dd85-44ba-9066-324b3fc72d76" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 26.047s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2279.392338] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2279.392630] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2279.898023] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2279.898328] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Starting heal instance info cache {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9974}} [ 2279.898399] env[63379]: DEBUG nova.compute.manager [None 
req-76609179-3ebc-4316-8203-21a64671102b None None] Rebuilding the list of instances to heal {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9978}} [ 2280.464622] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "refresh_cache-e9de2a12-dd85-44ba-9066-324b3fc72d76" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2280.464833] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquired lock "refresh_cache-e9de2a12-dd85-44ba-9066-324b3fc72d76" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2280.464944] env[63379]: DEBUG nova.network.neutron [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Forcefully refreshing network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2280.465117] env[63379]: DEBUG nova.objects.instance [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lazy-loading 'info_cache' on Instance uuid e9de2a12-dd85-44ba-9066-324b3fc72d76 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2282.183777] env[63379]: DEBUG nova.network.neutron [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Updating instance_info_cache with network_info: [{"id": "ba0d9b39-04ad-4d23-bb55-cae60747bb6a", "address": "fa:16:3e:df:dd:b9", "network": {"id": "d10d49b9-7fd3-415b-8e53-f56c79be48c5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-59040310-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.182", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "36ebffe6565d46e48409834197213f5a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba0d9b39-04", "ovs_interfaceid": "ba0d9b39-04ad-4d23-bb55-cae60747bb6a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2282.687094] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Releasing lock "refresh_cache-e9de2a12-dd85-44ba-9066-324b3fc72d76" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2282.687347] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Updated the network info_cache for instance {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10045}} [ 2282.687526] env[63379]: DEBUG oslo_service.periodic_task [None 
req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2282.687691] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2282.687846] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2282.687994] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2282.688158] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2282.688308] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2282.688438] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63379) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10593}} [ 2282.688580] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager.update_available_resource {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2283.193064] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2283.193064] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2283.193064] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2283.193064] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63379) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2283.193803] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5b1b3a3-1875-4beb-b8d2-47b3756d9390 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.202392] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49a2c2fd-6022-4320-9900-59a703f0f32f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.216827] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9277e665-7662-4e69-bae9-136f47065e6b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.223798] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c3736b4-7bef-4e8d-a46e-bebe8789e3ef {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.252511] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181196MB free_disk=164GB free_vcpus=48 pci_devices=None {{(pid=63379) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2283.252658] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
2283.252839] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2284.278108] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance e9de2a12-dd85-44ba-9066-324b3fc72d76 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2284.278434] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2284.278434] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2284.302557] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ccd5e97-d128-4142-a793-869bcc497fe3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.310539] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1df286fc-3bd4-4211-aa90-dee2652a1263 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.339471] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-616b3c3f-3867-4b18-9011-ad4bb73dd816 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.346321] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9df9bfe0-4749-4c3a-bd5e-aef5fa48ed6d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.359707] env[63379]: DEBUG nova.compute.provider_tree [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2284.863187] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2285.368074] env[63379]: DEBUG nova.compute.resource_tracker [None 
req-76609179-3ebc-4316-8203-21a64671102b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63379) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2285.368459] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.115s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2306.464675] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7f3231ed-dbde-4b2f-9860-ca799eeca357 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquiring lock "e9de2a12-dd85-44ba-9066-324b3fc72d76" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2306.465127] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7f3231ed-dbde-4b2f-9860-ca799eeca357 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lock "e9de2a12-dd85-44ba-9066-324b3fc72d76" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2306.968560] env[63379]: INFO nova.compute.manager [None req-7f3231ed-dbde-4b2f-9860-ca799eeca357 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Detaching volume 918237ff-9426-42e7-9fab-daa95470f7e3 [ 2306.998821] env[63379]: INFO nova.virt.block_device [None req-7f3231ed-dbde-4b2f-9860-ca799eeca357 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Attempting to driver detach volume 918237ff-9426-42e7-9fab-daa95470f7e3 from mountpoint /dev/sdb [ 2306.999105] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f3231ed-dbde-4b2f-9860-ca799eeca357 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Volume detach. 
Driver type: vmdk {{(pid=63379) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2306.999310] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f3231ed-dbde-4b2f-9860-ca799eeca357 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369545', 'volume_id': '918237ff-9426-42e7-9fab-daa95470f7e3', 'name': 'volume-918237ff-9426-42e7-9fab-daa95470f7e3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': 'e9de2a12-dd85-44ba-9066-324b3fc72d76', 'attached_at': '', 'detached_at': '', 'volume_id': '918237ff-9426-42e7-9fab-daa95470f7e3', 'serial': '918237ff-9426-42e7-9fab-daa95470f7e3'} {{(pid=63379) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2307.000230] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70c51d0b-2d98-4d45-9d1c-b138c1ccb88e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2307.021214] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3944552-4ec7-4879-ae0e-12528cf6e500 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2307.027579] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd969fef-c869-4aec-a614-134d49a9b94a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2307.046755] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d296a6ab-4dcb-48c0-9cf1-ae0e27e895fd {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2307.061653] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f3231ed-dbde-4b2f-9860-ca799eeca357 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] The volume has not been displaced from its original location: [datastore1] volume-918237ff-9426-42e7-9fab-daa95470f7e3/volume-918237ff-9426-42e7-9fab-daa95470f7e3.vmdk. No consolidation needed. 
{{(pid=63379) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2307.066824] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f3231ed-dbde-4b2f-9860-ca799eeca357 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Reconfiguring VM instance instance-0000007c to detach disk 2001 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2307.067097] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0595aaed-4ea9-4fa3-b96c-8b744b2e11d1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2307.087921] env[63379]: DEBUG oslo_vmware.api [None req-7f3231ed-dbde-4b2f-9860-ca799eeca357 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2307.087921] env[63379]: value = "task-1780627" [ 2307.087921] env[63379]: _type = "Task" [ 2307.087921] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2307.095310] env[63379]: DEBUG oslo_vmware.api [None req-7f3231ed-dbde-4b2f-9860-ca799eeca357 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780627, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2307.597838] env[63379]: DEBUG oslo_vmware.api [None req-7f3231ed-dbde-4b2f-9860-ca799eeca357 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780627, 'name': ReconfigVM_Task, 'duration_secs': 0.225117} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2307.598265] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f3231ed-dbde-4b2f-9860-ca799eeca357 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Reconfigured VM instance instance-0000007c to detach disk 2001 {{(pid=63379) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2307.602806] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9f6fd6a3-e61b-4b49-9cc8-26804ea5367a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2307.617504] env[63379]: DEBUG oslo_vmware.api [None req-7f3231ed-dbde-4b2f-9860-ca799eeca357 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2307.617504] env[63379]: value = "task-1780628" [ 2307.617504] env[63379]: _type = "Task" [ 2307.617504] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2307.625137] env[63379]: DEBUG oslo_vmware.api [None req-7f3231ed-dbde-4b2f-9860-ca799eeca357 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780628, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2308.127313] env[63379]: DEBUG oslo_vmware.api [None req-7f3231ed-dbde-4b2f-9860-ca799eeca357 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780628, 'name': ReconfigVM_Task, 'duration_secs': 0.13471} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2308.127638] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f3231ed-dbde-4b2f-9860-ca799eeca357 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-369545', 'volume_id': '918237ff-9426-42e7-9fab-daa95470f7e3', 'name': 'volume-918237ff-9426-42e7-9fab-daa95470f7e3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': 'e9de2a12-dd85-44ba-9066-324b3fc72d76', 'attached_at': '', 'detached_at': '', 'volume_id': '918237ff-9426-42e7-9fab-daa95470f7e3', 'serial': '918237ff-9426-42e7-9fab-daa95470f7e3'} {{(pid=63379) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2308.666828] env[63379]: DEBUG nova.objects.instance [None req-7f3231ed-dbde-4b2f-9860-ca799eeca357 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lazy-loading 'flavor' on Instance uuid e9de2a12-dd85-44ba-9066-324b3fc72d76 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2309.676583] env[63379]: DEBUG oslo_concurrency.lockutils [None req-7f3231ed-dbde-4b2f-9860-ca799eeca357 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lock "e9de2a12-dd85-44ba-9066-324b3fc72d76" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.211s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2310.698022] env[63379]: DEBUG oslo_concurrency.lockutils [None req-26060254-00c4-4be8-95fd-bf28791dd5d6 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquiring lock "e9de2a12-dd85-44ba-9066-324b3fc72d76" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2310.698374] env[63379]: DEBUG oslo_concurrency.lockutils [None req-26060254-00c4-4be8-95fd-bf28791dd5d6 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lock "e9de2a12-dd85-44ba-9066-324b3fc72d76" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2310.698609] env[63379]: DEBUG oslo_concurrency.lockutils [None req-26060254-00c4-4be8-95fd-bf28791dd5d6 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquiring lock "e9de2a12-dd85-44ba-9066-324b3fc72d76-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2310.698799] env[63379]: DEBUG oslo_concurrency.lockutils [None req-26060254-00c4-4be8-95fd-bf28791dd5d6 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lock "e9de2a12-dd85-44ba-9066-324b3fc72d76-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2310.698975] env[63379]: DEBUG oslo_concurrency.lockutils [None req-26060254-00c4-4be8-95fd-bf28791dd5d6 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lock "e9de2a12-dd85-44ba-9066-324b3fc72d76-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2310.701273] env[63379]: INFO nova.compute.manager [None req-26060254-00c4-4be8-95fd-bf28791dd5d6 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Terminating instance [ 2310.703103] env[63379]: DEBUG nova.compute.manager [None req-26060254-00c4-4be8-95fd-bf28791dd5d6 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Start destroying the instance on the hypervisor. {{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2310.703309] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-26060254-00c4-4be8-95fd-bf28791dd5d6 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2310.704146] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-146c701d-e94c-49e3-803f-bb8b3940c708 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.711960] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-26060254-00c4-4be8-95fd-bf28791dd5d6 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2310.712197] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1c16f3fc-ad67-406d-8896-1e4112e8a80a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.718795] env[63379]: DEBUG oslo_vmware.api [None req-26060254-00c4-4be8-95fd-bf28791dd5d6 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2310.718795] env[63379]: value = "task-1780629" [ 2310.718795] env[63379]: _type = "Task" [ 2310.718795] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2310.726413] env[63379]: DEBUG oslo_vmware.api [None req-26060254-00c4-4be8-95fd-bf28791dd5d6 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780629, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2311.228690] env[63379]: DEBUG oslo_vmware.api [None req-26060254-00c4-4be8-95fd-bf28791dd5d6 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780629, 'name': PowerOffVM_Task, 'duration_secs': 0.171666} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2311.228975] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-26060254-00c4-4be8-95fd-bf28791dd5d6 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2311.229180] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-26060254-00c4-4be8-95fd-bf28791dd5d6 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2311.229417] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7e22d759-0a03-4989-abcd-1d89133fa3bc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.300748] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-26060254-00c4-4be8-95fd-bf28791dd5d6 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2311.301112] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-26060254-00c4-4be8-95fd-bf28791dd5d6 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2311.301246] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-26060254-00c4-4be8-95fd-bf28791dd5d6 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Deleting the datastore file [datastore1] e9de2a12-dd85-44ba-9066-324b3fc72d76 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2311.301429] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b1a4c59c-f337-4aaf-b9a6-36c2265a132d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.309276] env[63379]: DEBUG oslo_vmware.api [None req-26060254-00c4-4be8-95fd-bf28791dd5d6 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2311.309276] env[63379]: value = 
"task-1780631" [ 2311.309276] env[63379]: _type = "Task" [ 2311.309276] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2311.320958] env[63379]: DEBUG oslo_vmware.api [None req-26060254-00c4-4be8-95fd-bf28791dd5d6 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780631, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2311.818803] env[63379]: DEBUG oslo_vmware.api [None req-26060254-00c4-4be8-95fd-bf28791dd5d6 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780631, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.120358} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2311.819217] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-26060254-00c4-4be8-95fd-bf28791dd5d6 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2311.819262] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-26060254-00c4-4be8-95fd-bf28791dd5d6 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2311.819509] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-26060254-00c4-4be8-95fd-bf28791dd5d6 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2311.819740] env[63379]: INFO nova.compute.manager [None req-26060254-00c4-4be8-95fd-bf28791dd5d6 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Took 1.12 seconds to destroy the instance on the hypervisor. [ 2311.820056] env[63379]: DEBUG oslo.service.loopingcall [None req-26060254-00c4-4be8-95fd-bf28791dd5d6 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2311.820311] env[63379]: DEBUG nova.compute.manager [-] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2311.820434] env[63379]: DEBUG nova.network.neutron [-] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2312.279692] env[63379]: DEBUG nova.compute.manager [req-1afa57e5-d02e-4147-8968-b5899e388817 req-7e5d2a5d-9b5d-47aa-bc64-b056c03b7312 service nova] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Received event network-vif-deleted-ba0d9b39-04ad-4d23-bb55-cae60747bb6a {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2312.279929] env[63379]: INFO nova.compute.manager [req-1afa57e5-d02e-4147-8968-b5899e388817 req-7e5d2a5d-9b5d-47aa-bc64-b056c03b7312 service nova] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Neutron deleted interface ba0d9b39-04ad-4d23-bb55-cae60747bb6a; detaching it from the instance and deleting it from the info cache [ 2312.280131] env[63379]: DEBUG nova.network.neutron [req-1afa57e5-d02e-4147-8968-b5899e388817 req-7e5d2a5d-9b5d-47aa-bc64-b056c03b7312 service nova] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2312.730463] env[63379]: DEBUG nova.network.neutron [-] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2312.782463] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-598c1a2c-6122-4938-ac85-3b2b8f24e11d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.791933] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ccf2afd-63bc-4d27-a51c-e5ae1c5bab0b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.815085] env[63379]: DEBUG nova.compute.manager [req-1afa57e5-d02e-4147-8968-b5899e388817 req-7e5d2a5d-9b5d-47aa-bc64-b056c03b7312 service nova] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Detach interface failed, port_id=ba0d9b39-04ad-4d23-bb55-cae60747bb6a, reason: Instance e9de2a12-dd85-44ba-9066-324b3fc72d76 could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 2313.233322] env[63379]: INFO nova.compute.manager [-] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Took 1.41 seconds to deallocate network for instance. 
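The vCenter operations recorded in this section (PowerOnVM_Task, ReconfigVM_Task, PowerOffVM_Task, DeleteDatastoreFile_Task, CreateVM_Task) all follow the same oslo.vmware pattern: a task-returning method is invoked through the API session and the caller blocks in wait_for_task while the task is polled, which is what the _poll_task progress entries above reflect. A minimal sketch of that pattern, with placeholder vCenter hostname and credentials (nothing below is taken from this deployment except the instance UUID and the roughly 0.5-second poll cadence):

# Minimal sketch of the oslo.vmware invoke/wait pattern seen in these log
# entries. The hostname and credentials are placeholders, not values from
# this log.
from oslo_vmware import api as vmware_api

# Establish a vSphere API session (the same class that logs
# "Successfully established new session" earlier in this run).
session = vmware_api.VMwareAPISession(
    'vc.example.test',                  # vCenter hostname (placeholder)
    'administrator@vsphere.local',      # username (placeholder)
    'secret',                           # password (placeholder)
    api_retry_count=10,
    task_poll_interval=0.5)             # ~0.5 s cadence, as in the log polls

# Look up the VM by instance UUID; FindAllByUuid is the SearchIndex call
# logged during the terminate sequence above.
vm_refs = session.invoke_api(
    session.vim, 'FindAllByUuid',
    session.vim.service_content.searchIndex,
    uuid='e9de2a12-dd85-44ba-9066-324b3fc72d76',
    vmSearch=True, instanceUuid=True)

if vm_refs:
    vm_ref = vm_refs[0]
    # Task-returning calls (PowerOnVM_Task, PowerOffVM_Task, ReconfigVM_Task,
    # ...) hand back a Task managed-object reference; wait_for_task() polls it
    # until SUCCESS, logging progress the same way _poll_task does above, and
    # raises if the task ends in an error state.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

session.logout()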
[ 2313.739970] env[63379]: DEBUG oslo_concurrency.lockutils [None req-26060254-00c4-4be8-95fd-bf28791dd5d6 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2313.740291] env[63379]: DEBUG oslo_concurrency.lockutils [None req-26060254-00c4-4be8-95fd-bf28791dd5d6 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2313.740560] env[63379]: DEBUG nova.objects.instance [None req-26060254-00c4-4be8-95fd-bf28791dd5d6 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lazy-loading 'resources' on Instance uuid e9de2a12-dd85-44ba-9066-324b3fc72d76 {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2314.274316] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64071a8e-ea3e-4816-a151-376757a54a7c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.281768] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfd8b95d-a2bb-48fc-9c00-8e4591511a7b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.311912] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d73df78b-ae53-416b-88ef-611b7c8e5773 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.318910] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-190ddbb6-c957-439a-ae58-702c10968416 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.331544] env[63379]: DEBUG nova.compute.provider_tree [None req-26060254-00c4-4be8-95fd-bf28791dd5d6 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2314.834941] env[63379]: DEBUG nova.scheduler.client.report [None req-26060254-00c4-4be8-95fd-bf28791dd5d6 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2315.340936] env[63379]: DEBUG oslo_concurrency.lockutils [None 
req-26060254-00c4-4be8-95fd-bf28791dd5d6 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.600s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2315.361020] env[63379]: INFO nova.scheduler.client.report [None req-26060254-00c4-4be8-95fd-bf28791dd5d6 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Deleted allocations for instance e9de2a12-dd85-44ba-9066-324b3fc72d76 [ 2315.869386] env[63379]: DEBUG oslo_concurrency.lockutils [None req-26060254-00c4-4be8-95fd-bf28791dd5d6 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lock "e9de2a12-dd85-44ba-9066-324b3fc72d76" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.170s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2317.536209] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquiring lock "16498ce6-8961-408b-8d2a-c61f83f5a56f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2317.536500] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lock "16498ce6-8961-408b-8d2a-c61f83f5a56f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2318.039068] env[63379]: DEBUG nova.compute.manager [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Starting instance... 
{{(pid=63379) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2318.558104] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2318.558378] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2318.559900] env[63379]: INFO nova.compute.claims [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2319.595400] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e9a2bc6-3ab2-47d0-ac19-f0f1b230673f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.603007] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31ab162e-9b2d-4d0b-b347-3dc658317e6b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.634055] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74eeceb7-298c-4866-90ba-5bdb40f4a484 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.641007] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27701b51-9702-4e74-823a-c2fbcd24977b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.654741] env[63379]: DEBUG nova.compute.provider_tree [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2320.157854] env[63379]: DEBUG nova.scheduler.client.report [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2320.663457] env[63379]: DEBUG 
oslo_concurrency.lockutils [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.105s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2320.664046] env[63379]: DEBUG nova.compute.manager [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Start building networks asynchronously for instance. {{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2321.169041] env[63379]: DEBUG nova.compute.utils [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Using /dev/sd instead of None {{(pid=63379) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2321.170594] env[63379]: DEBUG nova.compute.manager [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Allocating IP information in the background. {{(pid=63379) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2321.170801] env[63379]: DEBUG nova.network.neutron [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] allocate_for_instance() {{(pid=63379) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2321.218664] env[63379]: DEBUG nova.policy [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '14ebdcc952084f9e8c91614cca982f3f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '36ebffe6565d46e48409834197213f5a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63379) authorize /opt/stack/nova/nova/policy.py:201}} [ 2321.482472] env[63379]: DEBUG nova.network.neutron [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Successfully created port: 36b0f993-b954-495e-9deb-0a3125b518eb {{(pid=63379) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2321.673532] env[63379]: DEBUG nova.compute.manager [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Start building block device mappings for instance. 
{{(pid=63379) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2322.684585] env[63379]: DEBUG nova.compute.manager [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Start spawning the instance on the hypervisor. {{(pid=63379) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2322.710741] env[63379]: DEBUG nova.virt.hardware [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T23:13:50Z,direct_url=,disk_format='vmdk',id=d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='eb95d75934bc4912a35f709406a98a65',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T23:13:50Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2322.711041] env[63379]: DEBUG nova.virt.hardware [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2322.711212] env[63379]: DEBUG nova.virt.hardware [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2322.711417] env[63379]: DEBUG nova.virt.hardware [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2322.711638] env[63379]: DEBUG nova.virt.hardware [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2322.711725] env[63379]: DEBUG nova.virt.hardware [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2322.711958] env[63379]: DEBUG nova.virt.hardware [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2322.712152] env[63379]: DEBUG nova.virt.hardware [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2322.712258] env[63379]: DEBUG nova.virt.hardware [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2322.712425] env[63379]: DEBUG nova.virt.hardware [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2322.712600] env[63379]: DEBUG nova.virt.hardware [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2322.713509] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7ee82e4-8669-43d3-9719-479f033d96c9 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2322.720970] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1de4651b-cffd-47d7-ab1c-c17ef1907724 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2322.854109] env[63379]: DEBUG nova.compute.manager [req-8cd3d28d-4cce-40b6-bed0-3135b2233750 req-a2c903ab-ef76-436d-b4d0-0e7dc19a2035 service nova] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Received event network-vif-plugged-36b0f993-b954-495e-9deb-0a3125b518eb {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2322.854348] env[63379]: DEBUG oslo_concurrency.lockutils [req-8cd3d28d-4cce-40b6-bed0-3135b2233750 req-a2c903ab-ef76-436d-b4d0-0e7dc19a2035 service nova] Acquiring lock "16498ce6-8961-408b-8d2a-c61f83f5a56f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2322.854559] env[63379]: DEBUG oslo_concurrency.lockutils [req-8cd3d28d-4cce-40b6-bed0-3135b2233750 req-a2c903ab-ef76-436d-b4d0-0e7dc19a2035 service nova] Lock "16498ce6-8961-408b-8d2a-c61f83f5a56f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2322.854733] env[63379]: DEBUG oslo_concurrency.lockutils [req-8cd3d28d-4cce-40b6-bed0-3135b2233750 req-a2c903ab-ef76-436d-b4d0-0e7dc19a2035 service nova] Lock "16498ce6-8961-408b-8d2a-c61f83f5a56f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2322.854909] env[63379]: DEBUG nova.compute.manager [req-8cd3d28d-4cce-40b6-bed0-3135b2233750 req-a2c903ab-ef76-436d-b4d0-0e7dc19a2035 service nova] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] No waiting events found dispatching network-vif-plugged-36b0f993-b954-495e-9deb-0a3125b518eb {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2322.856521] env[63379]: WARNING nova.compute.manager [req-8cd3d28d-4cce-40b6-bed0-3135b2233750 req-a2c903ab-ef76-436d-b4d0-0e7dc19a2035 service nova] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Received unexpected event network-vif-plugged-36b0f993-b954-495e-9deb-0a3125b518eb for instance with vm_state building and task_state spawning. [ 2322.934102] env[63379]: DEBUG nova.network.neutron [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Successfully updated port: 36b0f993-b954-495e-9deb-0a3125b518eb {{(pid=63379) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2323.438033] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquiring lock "refresh_cache-16498ce6-8961-408b-8d2a-c61f83f5a56f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2323.438033] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquired lock "refresh_cache-16498ce6-8961-408b-8d2a-c61f83f5a56f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2323.438033] env[63379]: DEBUG nova.network.neutron [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2323.969083] env[63379]: DEBUG nova.network.neutron [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Instance cache missing network info. 
{{(pid=63379) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2324.089148] env[63379]: DEBUG nova.network.neutron [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Updating instance_info_cache with network_info: [{"id": "36b0f993-b954-495e-9deb-0a3125b518eb", "address": "fa:16:3e:54:b1:bf", "network": {"id": "d10d49b9-7fd3-415b-8e53-f56c79be48c5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-59040310-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "36ebffe6565d46e48409834197213f5a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36b0f993-b9", "ovs_interfaceid": "36b0f993-b954-495e-9deb-0a3125b518eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2324.592063] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Releasing lock "refresh_cache-16498ce6-8961-408b-8d2a-c61f83f5a56f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2324.592308] env[63379]: DEBUG nova.compute.manager [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Instance network_info: |[{"id": "36b0f993-b954-495e-9deb-0a3125b518eb", "address": "fa:16:3e:54:b1:bf", "network": {"id": "d10d49b9-7fd3-415b-8e53-f56c79be48c5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-59040310-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "36ebffe6565d46e48409834197213f5a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36b0f993-b9", "ovs_interfaceid": "36b0f993-b954-495e-9deb-0a3125b518eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63379) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2324.592818] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:54:b1:bf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '36b0f993-b954-495e-9deb-0a3125b518eb', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2324.600374] env[63379]: DEBUG oslo.service.loopingcall [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2324.600588] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2324.600811] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-82c43dd8-ddb4-4971-a079-370424e0ba67 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2324.620935] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2324.620935] env[63379]: value = "task-1780632" [ 2324.620935] env[63379]: _type = "Task" [ 2324.620935] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2324.628205] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780632, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2324.884441] env[63379]: DEBUG nova.compute.manager [req-afae6168-a24c-4367-acb7-a2532292c120 req-0e26df40-fb6b-4b42-8664-e8fe7c5424dd service nova] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Received event network-changed-36b0f993-b954-495e-9deb-0a3125b518eb {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2324.884673] env[63379]: DEBUG nova.compute.manager [req-afae6168-a24c-4367-acb7-a2532292c120 req-0e26df40-fb6b-4b42-8664-e8fe7c5424dd service nova] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Refreshing instance network info cache due to event network-changed-36b0f993-b954-495e-9deb-0a3125b518eb. 
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 2324.884897] env[63379]: DEBUG oslo_concurrency.lockutils [req-afae6168-a24c-4367-acb7-a2532292c120 req-0e26df40-fb6b-4b42-8664-e8fe7c5424dd service nova] Acquiring lock "refresh_cache-16498ce6-8961-408b-8d2a-c61f83f5a56f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2324.884965] env[63379]: DEBUG oslo_concurrency.lockutils [req-afae6168-a24c-4367-acb7-a2532292c120 req-0e26df40-fb6b-4b42-8664-e8fe7c5424dd service nova] Acquired lock "refresh_cache-16498ce6-8961-408b-8d2a-c61f83f5a56f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2324.885193] env[63379]: DEBUG nova.network.neutron [req-afae6168-a24c-4367-acb7-a2532292c120 req-0e26df40-fb6b-4b42-8664-e8fe7c5424dd service nova] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Refreshing network info cache for port 36b0f993-b954-495e-9deb-0a3125b518eb {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2325.131103] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780632, 'name': CreateVM_Task, 'duration_secs': 0.349194} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2325.131552] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2325.131940] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2325.132130] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2325.132451] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2325.132718] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-631e656e-2880-4576-a058-9c695cac93c1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2325.136862] env[63379]: DEBUG oslo_vmware.api [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2325.136862] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52130d25-a892-cedb-be57-21194f13aadf" [ 2325.136862] env[63379]: _type = "Task" [ 2325.136862] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2325.145409] env[63379]: DEBUG oslo_vmware.api [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52130d25-a892-cedb-be57-21194f13aadf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2325.563706] env[63379]: DEBUG nova.network.neutron [req-afae6168-a24c-4367-acb7-a2532292c120 req-0e26df40-fb6b-4b42-8664-e8fe7c5424dd service nova] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Updated VIF entry in instance network info cache for port 36b0f993-b954-495e-9deb-0a3125b518eb. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2325.564090] env[63379]: DEBUG nova.network.neutron [req-afae6168-a24c-4367-acb7-a2532292c120 req-0e26df40-fb6b-4b42-8664-e8fe7c5424dd service nova] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Updating instance_info_cache with network_info: [{"id": "36b0f993-b954-495e-9deb-0a3125b518eb", "address": "fa:16:3e:54:b1:bf", "network": {"id": "d10d49b9-7fd3-415b-8e53-f56c79be48c5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-59040310-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "36ebffe6565d46e48409834197213f5a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36b0f993-b9", "ovs_interfaceid": "36b0f993-b954-495e-9deb-0a3125b518eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2325.647252] env[63379]: DEBUG oslo_vmware.api [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52130d25-a892-cedb-be57-21194f13aadf, 'name': SearchDatastore_Task, 'duration_secs': 0.00942} completed successfully. 
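Annotation (not part of the log): the repeated "Waiting for the task … to complete" / "_poll_task … progress is 0%" / "completed successfully" entries above come from polling a vSphere task object until it reaches a terminal state. Below is a minimal sketch of such a poll loop; fetch_task_info() is a hypothetical stand-in for reading the Task's info from vCenter (which the real code does through the oslo.vmware session), not a real oslo.vmware call.

# Illustrative poll loop only -- fetch_task_info() is a hypothetical stand-in,
# NOT an oslo.vmware API.
import time
from typing import Callable, Mapping


class TaskFailed(Exception):
    pass


def wait_for_task(fetch_task_info: Callable[[], Mapping],
                  poll_interval: float = 0.5) -> Mapping:
    """Poll a task until it reports success, raising if it errors out."""
    while True:
        info = fetch_task_info()          # e.g. {'state': 'running', 'progress': 0}
        state = info.get("state")
        if state == "success":
            return info
        if state == "error":
            raise TaskFailed(info.get("error", "unknown error"))
        # 'queued' or 'running': report progress and try again, like the
        # "progress is 0%" DEBUG lines in the log above.
        print(f"progress is {info.get('progress', 0)}%")
        time.sleep(poll_interval)


if __name__ == "__main__":
    # Fake task that completes on the third poll.
    states = iter([{"state": "running", "progress": 0},
                   {"state": "running", "progress": 50},
                   {"state": "success", "progress": 100}])
    print(wait_for_task(lambda: next(states), poll_interval=0.01))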
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2325.647562] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2325.647794] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Processing image d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2325.648040] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2325.648195] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2325.648379] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2325.648628] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-adb7c072-2682-4002-8e90-625582dc6862 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2325.656662] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2325.656837] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2325.657508] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49d210fa-44e3-4cef-844f-98eb73d98119 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2325.662561] env[63379]: DEBUG oslo_vmware.api [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2325.662561] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]524765f9-6977-75e4-ed79-e5df64debc61" [ 2325.662561] env[63379]: _type = "Task" [ 2325.662561] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2325.669900] env[63379]: DEBUG oslo_vmware.api [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]524765f9-6977-75e4-ed79-e5df64debc61, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2326.066482] env[63379]: DEBUG oslo_concurrency.lockutils [req-afae6168-a24c-4367-acb7-a2532292c120 req-0e26df40-fb6b-4b42-8664-e8fe7c5424dd service nova] Releasing lock "refresh_cache-16498ce6-8961-408b-8d2a-c61f83f5a56f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2326.172766] env[63379]: DEBUG oslo_vmware.api [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]524765f9-6977-75e4-ed79-e5df64debc61, 'name': SearchDatastore_Task, 'duration_secs': 0.008197} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2326.173538] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-44b9fcf4-7043-4a76-857c-4ce2e602a83f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2326.178346] env[63379]: DEBUG oslo_vmware.api [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2326.178346] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52878151-a88b-0e77-2f84-3ec19500d3f9" [ 2326.178346] env[63379]: _type = "Task" [ 2326.178346] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2326.185846] env[63379]: DEBUG oslo_vmware.api [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52878151-a88b-0e77-2f84-3ec19500d3f9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2326.691257] env[63379]: DEBUG oslo_vmware.api [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52878151-a88b-0e77-2f84-3ec19500d3f9, 'name': SearchDatastore_Task, 'duration_secs': 0.009896} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2326.691689] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2326.692114] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 16498ce6-8961-408b-8d2a-c61f83f5a56f/16498ce6-8961-408b-8d2a-c61f83f5a56f.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2326.692497] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d4a79531-0d91-4fe0-b337-07c9ac20bffe {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2326.700167] env[63379]: DEBUG oslo_vmware.api [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2326.700167] env[63379]: value = "task-1780633" [ 2326.700167] env[63379]: _type = "Task" [ 2326.700167] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2326.710635] env[63379]: DEBUG oslo_vmware.api [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780633, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2327.209753] env[63379]: DEBUG oslo_vmware.api [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780633, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.380922} completed successfully. 
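Annotation (not part of the log): the CopyVirtualDisk entry above copies the cached image VMDK, [datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk, to the per-instance path [datastore1] <instance-uuid>/<instance-uuid>.vmdk. A small sketch of how such datastore paths can be assembled; the helper names are illustrative and this is not nova.virt.vmwareapi.ds_util.

# Illustrative only -- string helpers mirroring the "[datastore] folder/file"
# path shapes visible in the log.
def ds_path(datastore: str, *parts: str) -> str:
    """Build a '[datastore] a/b/c' style datastore path."""
    return f"[{datastore}] " + "/".join(parts)


def cached_image_vmdk(datastore: str, image_id: str,
                      cache_folder: str = "devstack-image-cache_base") -> str:
    return ds_path(datastore, cache_folder, image_id, f"{image_id}.vmdk")


def instance_root_vmdk(datastore: str, instance_uuid: str) -> str:
    return ds_path(datastore, instance_uuid, f"{instance_uuid}.vmdk")


if __name__ == "__main__":
    image = "d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48"
    instance = "16498ce6-8961-408b-8d2a-c61f83f5a56f"
    # These two lines reproduce the source and destination of the
    # CopyVirtualDisk_Task in the log above.
    print(cached_image_vmdk("datastore1", image))
    print(instance_root_vmdk("datastore1", instance))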
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2327.210209] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48/d3d2d67c-c3e3-4e1e-9156-0c896c5b3d48.vmdk to [datastore1] 16498ce6-8961-408b-8d2a-c61f83f5a56f/16498ce6-8961-408b-8d2a-c61f83f5a56f.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2327.210310] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Extending root virtual disk to 1048576 {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2327.210516] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-69c65a8e-9441-4f27-a25c-e10734839204 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2327.217272] env[63379]: DEBUG oslo_vmware.api [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2327.217272] env[63379]: value = "task-1780634" [ 2327.217272] env[63379]: _type = "Task" [ 2327.217272] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2327.225290] env[63379]: DEBUG oslo_vmware.api [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780634, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2327.727619] env[63379]: DEBUG oslo_vmware.api [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780634, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.207082} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2327.727891] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Extended root virtual disk {{(pid=63379) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2327.728672] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd302f25-ca08-47dd-864f-2121c280b4c3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2327.750158] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Reconfiguring VM instance instance-0000007d to attach disk [datastore1] 16498ce6-8961-408b-8d2a-c61f83f5a56f/16498ce6-8961-408b-8d2a-c61f83f5a56f.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2327.750376] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d9c94a3a-2366-43ab-b736-fba4250687ea {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2327.770585] env[63379]: DEBUG oslo_vmware.api [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2327.770585] env[63379]: value = "task-1780635" [ 2327.770585] env[63379]: _type = "Task" [ 2327.770585] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2327.777762] env[63379]: DEBUG oslo_vmware.api [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780635, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2328.281144] env[63379]: DEBUG oslo_vmware.api [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780635, 'name': ReconfigVM_Task, 'duration_secs': 0.275419} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2328.281588] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Reconfigured VM instance instance-0000007d to attach disk [datastore1] 16498ce6-8961-408b-8d2a-c61f83f5a56f/16498ce6-8961-408b-8d2a-c61f83f5a56f.vmdk or device None with type sparse {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2328.282022] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5d155517-f443-425a-879c-3f8d5a7f0bc5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2328.288363] env[63379]: DEBUG oslo_vmware.api [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2328.288363] env[63379]: value = "task-1780636" [ 2328.288363] env[63379]: _type = "Task" [ 2328.288363] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2328.295746] env[63379]: DEBUG oslo_vmware.api [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780636, 'name': Rename_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2328.797935] env[63379]: DEBUG oslo_vmware.api [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780636, 'name': Rename_Task, 'duration_secs': 0.137732} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2328.798235] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2328.798481] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3c91b300-4b3c-439a-b283-0c6f2f692ad1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2328.805212] env[63379]: DEBUG oslo_vmware.api [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2328.805212] env[63379]: value = "task-1780637" [ 2328.805212] env[63379]: _type = "Task" [ 2328.805212] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2328.812368] env[63379]: DEBUG oslo_vmware.api [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780637, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2329.315546] env[63379]: DEBUG oslo_vmware.api [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780637, 'name': PowerOnVM_Task, 'duration_secs': 0.413131} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2329.316025] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2329.316025] env[63379]: INFO nova.compute.manager [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Took 6.63 seconds to spawn the instance on the hypervisor. [ 2329.316253] env[63379]: DEBUG nova.compute.manager [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2329.317033] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06a0f74f-3ed8-4b07-81e4-d40714cccf8f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2329.835794] env[63379]: INFO nova.compute.manager [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Took 11.29 seconds to build instance. [ 2330.338679] env[63379]: DEBUG oslo_concurrency.lockutils [None req-6368348d-4038-4f51-85bc-0f482e2f107d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lock "16498ce6-8961-408b-8d2a-c61f83f5a56f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.802s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2330.664025] env[63379]: DEBUG nova.compute.manager [req-083d60db-1d75-45eb-a83f-c03552fca0f8 req-66c20648-2c3a-493b-b2c4-72e2f6928cbf service nova] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Received event network-changed-36b0f993-b954-495e-9deb-0a3125b518eb {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2330.664025] env[63379]: DEBUG nova.compute.manager [req-083d60db-1d75-45eb-a83f-c03552fca0f8 req-66c20648-2c3a-493b-b2c4-72e2f6928cbf service nova] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Refreshing instance network info cache due to event network-changed-36b0f993-b954-495e-9deb-0a3125b518eb. 
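Annotation (not part of the log): throughout this trace, oslo.concurrency reports how long each lock was waited for and held; the build lock above, for example, is released after being held 12.802s. A minimal sketch of a context manager that records the same two figures; it is illustrative only, not oslo_concurrency.lockutils.

# Illustrative only -- reports "waited"/"held" times in the same spirit as the
# oslo_concurrency.lockutils DEBUG lines in this log.
import threading
import time
from contextlib import contextmanager

_locks: dict[str, threading.Lock] = {}
_guard = threading.Lock()


@contextmanager
def timed_lock(name: str):
    with _guard:
        lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print(f'Lock "{name}" acquired :: waited {waited:.3f}s')
    try:
        yield
    finally:
        held = time.monotonic() - t0 - waited
        lock.release()
        print(f'Lock "{name}" released :: held {held:.3f}s')


if __name__ == "__main__":
    with timed_lock("16498ce6-8961-408b-8d2a-c61f83f5a56f"):
        time.sleep(0.05)   # stand-in for the build work timed in the log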
{{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 2330.664025] env[63379]: DEBUG oslo_concurrency.lockutils [req-083d60db-1d75-45eb-a83f-c03552fca0f8 req-66c20648-2c3a-493b-b2c4-72e2f6928cbf service nova] Acquiring lock "refresh_cache-16498ce6-8961-408b-8d2a-c61f83f5a56f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2330.664025] env[63379]: DEBUG oslo_concurrency.lockutils [req-083d60db-1d75-45eb-a83f-c03552fca0f8 req-66c20648-2c3a-493b-b2c4-72e2f6928cbf service nova] Acquired lock "refresh_cache-16498ce6-8961-408b-8d2a-c61f83f5a56f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2330.664025] env[63379]: DEBUG nova.network.neutron [req-083d60db-1d75-45eb-a83f-c03552fca0f8 req-66c20648-2c3a-493b-b2c4-72e2f6928cbf service nova] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Refreshing network info cache for port 36b0f993-b954-495e-9deb-0a3125b518eb {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2331.366125] env[63379]: DEBUG nova.network.neutron [req-083d60db-1d75-45eb-a83f-c03552fca0f8 req-66c20648-2c3a-493b-b2c4-72e2f6928cbf service nova] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Updated VIF entry in instance network info cache for port 36b0f993-b954-495e-9deb-0a3125b518eb. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2331.366587] env[63379]: DEBUG nova.network.neutron [req-083d60db-1d75-45eb-a83f-c03552fca0f8 req-66c20648-2c3a-493b-b2c4-72e2f6928cbf service nova] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Updating instance_info_cache with network_info: [{"id": "36b0f993-b954-495e-9deb-0a3125b518eb", "address": "fa:16:3e:54:b1:bf", "network": {"id": "d10d49b9-7fd3-415b-8e53-f56c79be48c5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-59040310-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.175", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "36ebffe6565d46e48409834197213f5a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36b0f993-b9", "ovs_interfaceid": "36b0f993-b954-495e-9deb-0a3125b518eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2331.869350] env[63379]: DEBUG oslo_concurrency.lockutils [req-083d60db-1d75-45eb-a83f-c03552fca0f8 req-66c20648-2c3a-493b-b2c4-72e2f6928cbf service nova] Releasing lock "refresh_cache-16498ce6-8961-408b-8d2a-c61f83f5a56f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2345.370222] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic 
task ComputeManager._check_instance_build_time {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2345.370537] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2345.370668] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Starting heal instance info cache {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9974}} [ 2345.370786] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Rebuilding the list of instances to heal {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9978}} [ 2345.899773] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "refresh_cache-16498ce6-8961-408b-8d2a-c61f83f5a56f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2345.899933] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquired lock "refresh_cache-16498ce6-8961-408b-8d2a-c61f83f5a56f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2345.900102] env[63379]: DEBUG nova.network.neutron [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Forcefully refreshing network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2345.900260] env[63379]: DEBUG nova.objects.instance [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lazy-loading 'info_cache' on Instance uuid 16498ce6-8961-408b-8d2a-c61f83f5a56f {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2347.616633] env[63379]: DEBUG nova.network.neutron [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Updating instance_info_cache with network_info: [{"id": "36b0f993-b954-495e-9deb-0a3125b518eb", "address": "fa:16:3e:54:b1:bf", "network": {"id": "d10d49b9-7fd3-415b-8e53-f56c79be48c5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-59040310-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.175", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "36ebffe6565d46e48409834197213f5a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36b0f993-b9", "ovs_interfaceid": "36b0f993-b954-495e-9deb-0a3125b518eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] 
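Annotation (not part of the log): the instance_info_cache entry above, which by now carries the floating IP 10.180.180.175 alongside the fixed 192.168.128.4, is a JSON list of VIF dicts. The sketch below pulls the commonly needed fields out of such an entry with plain dict access; the field names are copied from that entry, while Nova itself wraps this data in its network model objects.

# Illustrative only -- walks a network_info cache entry shaped like the one in
# the log above and summarises each VIF.
import json

network_info_json = """
[{"id": "36b0f993-b954-495e-9deb-0a3125b518eb",
  "address": "fa:16:3e:54:b1:bf",
  "devname": "tap36b0f993-b9",
  "network": {"label": "tempest-AttachVolumeShelveTestJSON-59040310-network",
              "meta": {"mtu": 8950},
              "subnets": [{"cidr": "192.168.128.0/28",
                           "ips": [{"address": "192.168.128.4",
                                    "type": "fixed",
                                    "floating_ips": [{"address": "10.180.180.175",
                                                      "type": "floating"}]}]}]},
  "details": {"segmentation_id": 772}}]
"""

for vif in json.loads(network_info_json):
    fixed = [ip["address"]
             for subnet in vif["network"]["subnets"]
             for ip in subnet["ips"]]
    floating = [fip["address"]
                for subnet in vif["network"]["subnets"]
                for ip in subnet["ips"]
                for fip in ip.get("floating_ips", [])]
    print(f"port {vif['id']} mac {vif['address']} dev {vif['devname']} "
          f"mtu {vif['network']['meta']['mtu']} "
          f"fixed {fixed} floating {floating}")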
{{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2348.119775] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Releasing lock "refresh_cache-16498ce6-8961-408b-8d2a-c61f83f5a56f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2348.119985] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Updated the network info_cache for instance {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10045}} [ 2348.120215] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2348.120377] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2348.120524] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2348.120670] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2348.120812] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2348.120976] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2348.121136] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63379) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10593}} [ 2348.121292] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager.update_available_resource {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2348.624657] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2348.625061] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2348.625061] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2348.625212] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63379) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2348.626067] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f5a05dd-46ab-4203-8856-d1bcadb9857a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2348.634831] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-683c630a-2829-4e7d-b8fb-96b7c07a7930 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2348.648591] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07ea93cf-a1a5-42e8-8314-f141acb51809 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2348.654876] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0517eaff-d362-4f18-8868-bd4fddba8d57 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2348.683202] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181157MB free_disk=164GB free_vcpus=48 pci_devices=None {{(pid=63379) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2348.683396] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
2348.683530] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2349.707128] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 16498ce6-8961-408b-8d2a-c61f83f5a56f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2349.707400] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2349.707489] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2349.723570] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Refreshing inventories for resource provider cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2349.734772] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Updating ProviderTree inventory for provider cf478c89-515f-4372-b90f-4868ab56e978 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2349.734996] env[63379]: DEBUG nova.compute.provider_tree [None req-76609179-3ebc-4316-8203-21a64671102b None None] Updating inventory in ProviderTree for provider cf478c89-515f-4372-b90f-4868ab56e978 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2349.744698] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Refreshing aggregate associations for resource provider cf478c89-515f-4372-b90f-4868ab56e978, aggregates: None {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2349.760685] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Refreshing trait associations for resource provider 
cf478c89-515f-4372-b90f-4868ab56e978, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63379) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2349.782640] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dd16d97-9ae7-4b10-b57d-614230ea0a94 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2349.790380] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7addc9bc-aa7c-4e87-a4a5-39780e554c21 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2349.819668] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9045e171-b70c-47e2-9f08-2cb2133400ba {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2349.826488] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67dd54bc-c828-475d-a6f6-211b823a43b2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2349.839060] env[63379]: DEBUG nova.compute.provider_tree [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2350.342167] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2350.847366] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63379) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2350.847723] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.164s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2369.086669] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquiring lock "16498ce6-8961-408b-8d2a-c61f83f5a56f" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2369.087090] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d 
tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lock "16498ce6-8961-408b-8d2a-c61f83f5a56f" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2369.087148] env[63379]: INFO nova.compute.manager [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Shelving [ 2369.594916] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2369.595211] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3221a064-48ef-4507-af10-b0b5c305a965 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2369.602618] env[63379]: DEBUG oslo_vmware.api [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2369.602618] env[63379]: value = "task-1780638" [ 2369.602618] env[63379]: _type = "Task" [ 2369.602618] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2369.610976] env[63379]: DEBUG oslo_vmware.api [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780638, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2370.112500] env[63379]: DEBUG oslo_vmware.api [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780638, 'name': PowerOffVM_Task, 'duration_secs': 0.179841} completed successfully. 
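Annotation (not part of the log): looking back at the resource-tracker audit a few entries above, the "Final resource view" figures follow directly from the reported inventory plus the single running instance. A short worked check of that arithmetic, with the numbers copied from the log; this is a simplification for illustration, not placement's actual accounting.

# Illustrative arithmetic only, using the figures reported above.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}
# Allocation held by instance 16498ce6-8961-408b-8d2a-c61f83f5a56f (from the log).
instance_usage = {"VCPU": 1, "MEMORY_MB": 192, "DISK_GB": 1}

for rc, inv in inventory.items():
    used = inv["reserved"] + instance_usage[rc]
    print(f"{rc}: total={inv['total']} reserved={inv['reserved']} "
          f"allocation_ratio={inv['allocation_ratio']} used={used}")

# MEMORY_MB: 512 reserved + 192 for the instance = 704, matching the
# "used_ram=704MB" figure; used_vcpus=1 and used_disk=1GB match likewise.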
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2370.112883] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2370.113493] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15524adb-aabb-4e93-a667-8709ccc32765 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2370.131580] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53e25e2f-064f-44b1-83dd-67ed781972c8 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2370.641712] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Creating Snapshot of the VM instance {{(pid=63379) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2370.642040] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-af379897-a2c3-47b1-b6eb-b3265f311953 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2370.649854] env[63379]: DEBUG oslo_vmware.api [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2370.649854] env[63379]: value = "task-1780639" [ 2370.649854] env[63379]: _type = "Task" [ 2370.649854] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2370.658151] env[63379]: DEBUG oslo_vmware.api [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780639, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2371.159956] env[63379]: DEBUG oslo_vmware.api [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780639, 'name': CreateSnapshot_Task, 'duration_secs': 0.397954} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2371.160296] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Created Snapshot of the VM instance {{(pid=63379) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2371.160938] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1647207b-50db-4798-a88b-0f62b2e53ab7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2371.679025] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Creating linked-clone VM from snapshot {{(pid=63379) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2371.679299] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-517a21f9-8874-4ae5-a699-5e0567160522 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2371.691451] env[63379]: DEBUG oslo_vmware.api [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2371.691451] env[63379]: value = "task-1780640" [ 2371.691451] env[63379]: _type = "Task" [ 2371.691451] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2371.699924] env[63379]: DEBUG oslo_vmware.api [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780640, 'name': CloneVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2372.202379] env[63379]: DEBUG oslo_vmware.api [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780640, 'name': CloneVM_Task} progress is 94%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2372.704499] env[63379]: DEBUG oslo_vmware.api [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780640, 'name': CloneVM_Task} progress is 95%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2373.203483] env[63379]: DEBUG oslo_vmware.api [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780640, 'name': CloneVM_Task, 'duration_secs': 1.105483} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2373.203888] env[63379]: INFO nova.virt.vmwareapi.vmops [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Created linked-clone VM from snapshot [ 2373.204502] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b096bc80-b558-48a1-ac91-8090adf1f991 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2373.211532] env[63379]: DEBUG nova.virt.vmwareapi.images [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Uploading image 5fc4df29-a2ea-4dd2-b092-b9c91df2334b {{(pid=63379) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2373.231499] env[63379]: DEBUG oslo_vmware.rw_handles [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 2373.231499] env[63379]: value = "vm-369550" [ 2373.231499] env[63379]: _type = "VirtualMachine" [ 2373.231499] env[63379]: }. {{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 2373.231743] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-9ef51ffa-78d1-47ea-981c-099e60febd6b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2373.238617] env[63379]: DEBUG oslo_vmware.rw_handles [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lease: (returnval){ [ 2373.238617] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e0f9b5-e617-dd2d-21fd-9f9a831f9524" [ 2373.238617] env[63379]: _type = "HttpNfcLease" [ 2373.238617] env[63379]: } obtained for exporting VM: (result){ [ 2373.238617] env[63379]: value = "vm-369550" [ 2373.238617] env[63379]: _type = "VirtualMachine" [ 2373.238617] env[63379]: }. {{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 2373.238913] env[63379]: DEBUG oslo_vmware.api [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the lease: (returnval){ [ 2373.238913] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e0f9b5-e617-dd2d-21fd-9f9a831f9524" [ 2373.238913] env[63379]: _type = "HttpNfcLease" [ 2373.238913] env[63379]: } to be ready. {{(pid=63379) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2373.244264] env[63379]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2373.244264] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e0f9b5-e617-dd2d-21fd-9f9a831f9524" [ 2373.244264] env[63379]: _type = "HttpNfcLease" [ 2373.244264] env[63379]: } is initializing. 
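To upload the clone as image 5fc4df29-a2ea-4dd2-b092-b9c91df2334b, the export path asks vCenter for an HttpNfcLease via VirtualMachine.ExportVm and waits for it to leave the "initializing" state, as the lease records above show. A hedged sketch of that step; wait_for_lease_ready is assumed to be the oslo.vmware helper behind the api.py wait_for_lease_ready lines.

    def create_export_lease(session, vm_ref):
        # ExportVm returns an HttpNfcLease moref; its device URLs are only
        # usable once the lease reaches the "ready" state.
        lease = session.invoke_api(session.vim, 'ExportVm', vm_ref)
        session.wait_for_lease_ready(lease)
        return lease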
{{(pid=63379) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2373.746953] env[63379]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2373.746953] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e0f9b5-e617-dd2d-21fd-9f9a831f9524" [ 2373.746953] env[63379]: _type = "HttpNfcLease" [ 2373.746953] env[63379]: } is ready. {{(pid=63379) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2373.747268] env[63379]: DEBUG oslo_vmware.rw_handles [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2373.747268] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52e0f9b5-e617-dd2d-21fd-9f9a831f9524" [ 2373.747268] env[63379]: _type = "HttpNfcLease" [ 2373.747268] env[63379]: }. {{(pid=63379) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 2373.747973] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6916d307-7d77-4558-9192-3f4bb59c6d11 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2373.754704] env[63379]: DEBUG oslo_vmware.rw_handles [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b4be7c-8c6f-8fc9-ee7a-e88fe8f195b1/disk-0.vmdk from lease info. {{(pid=63379) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2373.754896] env[63379]: DEBUG oslo_vmware.rw_handles [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b4be7c-8c6f-8fc9-ee7a-e88fe8f195b1/disk-0.vmdk for reading. 
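Once ready, the lease's info property lists one deviceUrl per exported disk, and the driver picks the .vmdk URL (the esx7c1n2 disk-0.vmdk URL above) to open for reading. A simplified sketch of that lookup; the real handler also rewrites the host in the URL and keeps the lease alive with progress updates.

    from oslo_vmware import vim_util

    def find_vmdk_url(session, lease):
        lease_info = session.invoke_api(vim_util, 'get_object_property',
                                        session.vim, lease, 'info')
        for device_url in lease_info.deviceUrl:
            if device_url.url.endswith('.vmdk'):
                return device_url.url
        return None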
{{(pid=63379) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 2373.843869] env[63379]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b1340a57-fdde-464e-b676-dd7ef7dfcc4b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2374.196867] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2374.197120] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-76609179-3ebc-4316-8203-21a64671102b None None] Getting list of instances from cluster (obj){ [ 2374.197120] env[63379]: value = "domain-c8" [ 2374.197120] env[63379]: _type = "ClusterComputeResource" [ 2374.197120] env[63379]: } {{(pid=63379) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 2374.198321] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e264481-a16a-4947-a716-2e1b0625cbff {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2374.208483] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-76609179-3ebc-4316-8203-21a64671102b None None] Got total of 1 instances {{(pid=63379) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 2380.475611] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2380.476041] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2380.476041] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager.update_available_resource {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2380.979112] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2380.979474] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2380.980526] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2380.980721] env[63379]: DEBUG 
nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63379) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2380.981656] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7400d06-355d-4a45-b403-757f4ea38f03 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.990416] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9d1f347-d93d-44ac-9664-43a69afcd7af {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2381.004281] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae437ddc-4445-4a55-9a40-9212b0b94a7d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2381.010466] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a6545d7-54d4-4154-a131-1a6a0a7addea {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2381.040440] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181180MB free_disk=164GB free_vcpus=48 pci_devices=None {{(pid=63379) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2381.040657] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2381.040871] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2381.062989] env[63379]: DEBUG oslo_vmware.rw_handles [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b4be7c-8c6f-8fc9-ee7a-e88fe8f195b1/disk-0.vmdk. {{(pid=63379) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2381.063949] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-032de6ba-5f8c-482a-b28c-17dffe688033 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2381.069982] env[63379]: DEBUG oslo_vmware.rw_handles [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b4be7c-8c6f-8fc9-ee7a-e88fe8f195b1/disk-0.vmdk is in state: ready. 
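The 'Acquiring lock "compute_resources" ... acquired ... released' records threaded through this resource audit come from oslo.concurrency's named in-process locks, which serialize the resource tracker's critical sections and log how long each caller waited for and held the lock. A minimal sketch of the same pattern; the function name is illustrative.

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_available_resource():
        # Runs with the named lock held; lockutils emits the
        # "acquired ... waited Ns" / "released ... held Ns" DEBUG records.
        pass

    update_available_resource()

    # The same lock can also be taken explicitly as a context manager:
    with lockutils.lock('compute_resources'):
        pass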
{{(pid=63379) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2381.070181] env[63379]: ERROR oslo_vmware.rw_handles [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b4be7c-8c6f-8fc9-ee7a-e88fe8f195b1/disk-0.vmdk due to incomplete transfer. [ 2381.070400] env[63379]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-20b9b943-2ad7-4511-88f4-ca8b4626e748 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2381.077569] env[63379]: DEBUG oslo_vmware.rw_handles [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b4be7c-8c6f-8fc9-ee7a-e88fe8f195b1/disk-0.vmdk. {{(pid=63379) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2381.077766] env[63379]: DEBUG nova.virt.vmwareapi.images [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Uploaded image 5fc4df29-a2ea-4dd2-b092-b9c91df2334b to the Glance image server {{(pid=63379) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2381.079914] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Destroying the VM {{(pid=63379) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2381.080159] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-a5afea3a-42f2-4a70-8331-c1f55e9507de {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2381.086068] env[63379]: DEBUG oslo_vmware.api [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2381.086068] env[63379]: value = "task-1780642" [ 2381.086068] env[63379]: _type = "Task" [ 2381.086068] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2381.093316] env[63379]: DEBUG oslo_vmware.api [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780642, 'name': Destroy_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2381.595334] env[63379]: DEBUG oslo_vmware.api [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780642, 'name': Destroy_Task, 'duration_secs': 0.371357} completed successfully. 
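When the VMDK read handle is closed, the lease is released according to its state: a lease still "ready" after an incomplete transfer is aborted (the ERROR record above), while a finished transfer would complete it. A hedged sketch of that decision using the HttpNfcLease methods named in the log.

    from oslo_vmware import vim_util

    def release_lease(session, lease, transfer_complete):
        state = session.invoke_api(vim_util, 'get_object_property',
                                   session.vim, lease, 'state')
        if state != 'ready':
            return  # nothing left to release
        if transfer_complete:
            session.invoke_api(session.vim, 'HttpNfcLeaseComplete', lease)
        else:
            # The "Aborting lease ... due to incomplete transfer" path above.
            session.invoke_api(session.vim, 'HttpNfcLeaseAbort', lease)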
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2381.597046] env[63379]: INFO nova.virt.vmwareapi.vm_util [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Destroyed the VM [ 2381.597046] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Deleting Snapshot of the VM instance {{(pid=63379) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2381.597046] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-cfb3acd1-de01-43b5-91ae-3a437c2fd759 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2381.602480] env[63379]: DEBUG oslo_vmware.api [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2381.602480] env[63379]: value = "task-1780643" [ 2381.602480] env[63379]: _type = "Task" [ 2381.602480] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2381.609447] env[63379]: DEBUG oslo_vmware.api [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780643, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2382.068112] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 16498ce6-8961-408b-8d2a-c61f83f5a56f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
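With the upload done, the temporary clone is destroyed and the snapshot taken at the start of the flow is deleted (task-1780643 above). Snapshot removal is another plain task invocation, sketched here; removeChildren=False mirrors removing only this snapshot, and the argument name follows the vSphere RemoveSnapshot_Task API.

    def delete_vm_snapshot(session, snapshot_ref):
        task = session.invoke_api(session.vim, 'RemoveSnapshot_Task',
                                  snapshot_ref, removeChildren=False)
        session.wait_for_task(task)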
{{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2382.068326] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2382.068475] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2382.092911] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57df9fe3-9330-4b5c-90b5-26f3fb794d54 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.100452] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adf83d1b-ef01-4c7f-aacd-9dd85fa41bf4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.111363] env[63379]: DEBUG oslo_vmware.api [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780643, 'name': RemoveSnapshot_Task, 'duration_secs': 0.402791} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2382.135640] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Deleted Snapshot of the VM instance {{(pid=63379) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2382.135938] env[63379]: DEBUG nova.compute.manager [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2382.136843] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-802e5cd4-0a99-4cfe-bec9-cf818973e3c3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.139641] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3ac9546-64f4-4ad2-b643-dae9e1e50be0 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.150643] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0072688-3a29-46e1-af7d-8280d4ca3793 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.163710] env[63379]: DEBUG nova.compute.provider_tree [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2382.653144] env[63379]: INFO nova.compute.manager [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Shelve offloading [ 2382.654832] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2382.655101] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-167d109e-e609-4764-8016-0be82e77391b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.662200] env[63379]: DEBUG oslo_vmware.api [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2382.662200] env[63379]: value = "task-1780644" [ 2382.662200] env[63379]: _type = "Task" [ 2382.662200] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2382.666133] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2382.672059] env[63379]: DEBUG oslo_vmware.api [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780644, 'name': PowerOffVM_Task} progress is 0%. 
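The inventory dict reported to placement above is what bounds future scheduling: usable capacity per resource class is (total - reserved) * allocation_ratio, and max_unit caps any single allocation. A worked check against the reported values (48 VCPU at ratio 4.0, 196590 MB RAM with 512 reserved, 400 GB disk with max_unit 164):

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 164},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity, 'max per single allocation:', inv['max_unit'])
    # VCPU 192.0 max per single allocation: 16
    # MEMORY_MB 196078.0 max per single allocation: 65530
    # DISK_GB 400.0 max per single allocation: 164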
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2383.175016] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63379) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2383.175222] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.134s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2383.175688] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] VM already powered off {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2383.175922] env[63379]: DEBUG nova.compute.manager [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2383.176636] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5c702c1-4511-47e4-b9d3-9022c64d258a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2383.181861] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquiring lock "refresh_cache-16498ce6-8961-408b-8d2a-c61f83f5a56f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2383.182032] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquired lock "refresh_cache-16498ce6-8961-408b-8d2a-c61f83f5a56f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2383.182209] env[63379]: DEBUG nova.network.neutron [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2383.663720] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2383.664131] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2383.664131] env[63379]: DEBUG nova.compute.manager 
[None req-76609179-3ebc-4316-8203-21a64671102b None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63379) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10593}} [ 2383.882443] env[63379]: DEBUG nova.network.neutron [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Updating instance_info_cache with network_info: [{"id": "36b0f993-b954-495e-9deb-0a3125b518eb", "address": "fa:16:3e:54:b1:bf", "network": {"id": "d10d49b9-7fd3-415b-8e53-f56c79be48c5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-59040310-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.175", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "36ebffe6565d46e48409834197213f5a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36b0f993-b9", "ovs_interfaceid": "36b0f993-b954-495e-9deb-0a3125b518eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2384.385016] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Releasing lock "refresh_cache-16498ce6-8961-408b-8d2a-c61f83f5a56f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2384.641911] env[63379]: DEBUG nova.compute.manager [req-f3414419-6c10-494f-8e5b-95111e112025 req-7e1ed072-0a1d-4ed8-b0cd-67765174b9b7 service nova] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Received event network-vif-unplugged-36b0f993-b954-495e-9deb-0a3125b518eb {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2384.642261] env[63379]: DEBUG oslo_concurrency.lockutils [req-f3414419-6c10-494f-8e5b-95111e112025 req-7e1ed072-0a1d-4ed8-b0cd-67765174b9b7 service nova] Acquiring lock "16498ce6-8961-408b-8d2a-c61f83f5a56f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2384.642518] env[63379]: DEBUG oslo_concurrency.lockutils [req-f3414419-6c10-494f-8e5b-95111e112025 req-7e1ed072-0a1d-4ed8-b0cd-67765174b9b7 service nova] Lock "16498ce6-8961-408b-8d2a-c61f83f5a56f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2384.642736] env[63379]: DEBUG oslo_concurrency.lockutils [req-f3414419-6c10-494f-8e5b-95111e112025 req-7e1ed072-0a1d-4ed8-b0cd-67765174b9b7 
service nova] Lock "16498ce6-8961-408b-8d2a-c61f83f5a56f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2384.643506] env[63379]: DEBUG nova.compute.manager [req-f3414419-6c10-494f-8e5b-95111e112025 req-7e1ed072-0a1d-4ed8-b0cd-67765174b9b7 service nova] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] No waiting events found dispatching network-vif-unplugged-36b0f993-b954-495e-9deb-0a3125b518eb {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2384.643506] env[63379]: WARNING nova.compute.manager [req-f3414419-6c10-494f-8e5b-95111e112025 req-7e1ed072-0a1d-4ed8-b0cd-67765174b9b7 service nova] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Received unexpected event network-vif-unplugged-36b0f993-b954-495e-9deb-0a3125b518eb for instance with vm_state shelved and task_state shelving_offloading. [ 2384.739855] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2384.740802] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b439fa8-9339-40c8-8390-cebacf949db5 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2384.748581] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2384.748814] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2980c2dd-3016-499c-8aaa-f83909a0c37a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2384.817374] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2384.817658] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2384.817924] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Deleting the datastore file [datastore1] 16498ce6-8961-408b-8d2a-c61f83f5a56f {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2384.818245] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-73ec87c2-6643-4731-8013-581c533089dd 
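Shelve-offload then destroys the instance in the two steps visible above: the VM is unregistered from the vCenter inventory (a synchronous call), and its directory on datastore1 is deleted through the FileManager, which is a task again. A hedged sketch, assuming the FileManager reference comes from the session's service content and dc_ref is the owning datacenter.

    def unregister_vm(session, vm_ref):
        # UnregisterVM removes the VM from the inventory without touching
        # its files; it returns no task.
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    def delete_instance_files(session, dc_ref, datastore_path):
        # e.g. datastore_path = '[datastore1] 16498ce6-8961-408b-8d2a-c61f83f5a56f'
        file_manager = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager,
                                  name=datastore_path, datacenter=dc_ref)
        session.wait_for_task(task)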
{{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2384.824926] env[63379]: DEBUG oslo_vmware.api [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2384.824926] env[63379]: value = "task-1780646" [ 2384.824926] env[63379]: _type = "Task" [ 2384.824926] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2384.833650] env[63379]: DEBUG oslo_vmware.api [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780646, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2385.334952] env[63379]: DEBUG oslo_vmware.api [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780646, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.127822} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2385.335294] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2385.335524] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2385.335746] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2385.356009] env[63379]: INFO nova.scheduler.client.report [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Deleted allocations for instance 16498ce6-8961-408b-8d2a-c61f83f5a56f [ 2385.860992] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2385.861262] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63379) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2385.861374] env[63379]: DEBUG nova.objects.instance [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lazy-loading 'resources' on Instance uuid 16498ce6-8961-408b-8d2a-c61f83f5a56f {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2386.363754] env[63379]: DEBUG nova.objects.instance [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lazy-loading 'numa_topology' on Instance uuid 16498ce6-8961-408b-8d2a-c61f83f5a56f {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2386.667922] env[63379]: DEBUG nova.compute.manager [req-7c49dc56-9449-4456-86a4-048f7e0e7ead req-0a8cff06-5a45-452b-a263-a1808092b16e service nova] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Received event network-changed-36b0f993-b954-495e-9deb-0a3125b518eb {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2386.668159] env[63379]: DEBUG nova.compute.manager [req-7c49dc56-9449-4456-86a4-048f7e0e7ead req-0a8cff06-5a45-452b-a263-a1808092b16e service nova] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Refreshing instance network info cache due to event network-changed-36b0f993-b954-495e-9deb-0a3125b518eb. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 2386.668375] env[63379]: DEBUG oslo_concurrency.lockutils [req-7c49dc56-9449-4456-86a4-048f7e0e7ead req-0a8cff06-5a45-452b-a263-a1808092b16e service nova] Acquiring lock "refresh_cache-16498ce6-8961-408b-8d2a-c61f83f5a56f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2386.668523] env[63379]: DEBUG oslo_concurrency.lockutils [req-7c49dc56-9449-4456-86a4-048f7e0e7ead req-0a8cff06-5a45-452b-a263-a1808092b16e service nova] Acquired lock "refresh_cache-16498ce6-8961-408b-8d2a-c61f83f5a56f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2386.668690] env[63379]: DEBUG nova.network.neutron [req-7c49dc56-9449-4456-86a4-048f7e0e7ead req-0a8cff06-5a45-452b-a263-a1808092b16e service nova] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Refreshing network info cache for port 36b0f993-b954-495e-9deb-0a3125b518eb {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2386.866842] env[63379]: DEBUG nova.objects.base [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Object Instance<16498ce6-8961-408b-8d2a-c61f83f5a56f> lazy-loaded attributes: resources,numa_topology {{(pid=63379) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2386.891317] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcae75e6-e914-44cb-ad43-6aea40dc02ab {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2386.898718] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a990cbc-ddce-4f9c-aae9-387c641cde71 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2386.927548] env[63379]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15462286-4617-47fc-950c-fe19daa41c1f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2386.934611] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dcdd0e5-a4e7-4063-9ce2-c4943f1e3ea1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2386.948814] env[63379]: DEBUG nova.compute.provider_tree [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2387.378527] env[63379]: DEBUG nova.network.neutron [req-7c49dc56-9449-4456-86a4-048f7e0e7ead req-0a8cff06-5a45-452b-a263-a1808092b16e service nova] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Updated VIF entry in instance network info cache for port 36b0f993-b954-495e-9deb-0a3125b518eb. {{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2387.378886] env[63379]: DEBUG nova.network.neutron [req-7c49dc56-9449-4456-86a4-048f7e0e7ead req-0a8cff06-5a45-452b-a263-a1808092b16e service nova] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Updating instance_info_cache with network_info: [{"id": "36b0f993-b954-495e-9deb-0a3125b518eb", "address": "fa:16:3e:54:b1:bf", "network": {"id": "d10d49b9-7fd3-415b-8e53-f56c79be48c5", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-59040310-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.175", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "36ebffe6565d46e48409834197213f5a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap36b0f993-b9", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2387.452149] env[63379]: DEBUG nova.scheduler.client.report [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2387.646988] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 
tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquiring lock "16498ce6-8961-408b-8d2a-c61f83f5a56f" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2387.881750] env[63379]: DEBUG oslo_concurrency.lockutils [req-7c49dc56-9449-4456-86a4-048f7e0e7ead req-0a8cff06-5a45-452b-a263-a1808092b16e service nova] Releasing lock "refresh_cache-16498ce6-8961-408b-8d2a-c61f83f5a56f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2387.956405] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.095s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2388.464492] env[63379]: DEBUG oslo_concurrency.lockutils [None req-9c6f7152-c874-4aea-8cdd-e94f9afc3e4d tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lock "16498ce6-8961-408b-8d2a-c61f83f5a56f" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 19.377s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2388.465296] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lock "16498ce6-8961-408b-8d2a-c61f83f5a56f" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.818s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2388.465384] env[63379]: INFO nova.compute.manager [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Unshelving [ 2388.964203] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2388.964589] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2389.492041] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2389.492342] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2389.492563] env[63379]: DEBUG nova.objects.instance [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lazy-loading 'pci_requests' on Instance uuid 16498ce6-8961-408b-8d2a-c61f83f5a56f {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2389.964692] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2389.965133] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Starting heal instance info cache {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9974}} [ 2389.965133] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Rebuilding the list of instances to heal {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9978}} [ 2389.996078] env[63379]: DEBUG nova.objects.instance [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lazy-loading 'numa_topology' on Instance uuid 16498ce6-8961-408b-8d2a-c61f83f5a56f {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2390.467886] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Didn't find any instances for network info cache update. 
{{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10060}} [ 2390.499079] env[63379]: INFO nova.compute.claims [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2391.535129] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05fdd137-464b-46be-b9be-16340e89b72f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2391.542910] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aaad700-ef1e-47d0-9beb-b9f322167851 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2391.571655] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a793c4fc-75cb-4a32-9d23-b82a05413856 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2391.578075] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd650687-c0d9-402c-a876-a6cb618599a1 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2391.590369] env[63379]: DEBUG nova.compute.provider_tree [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2392.094251] env[63379]: DEBUG nova.scheduler.client.report [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2392.599640] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.107s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2392.628174] env[63379]: INFO nova.network.neutron [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Updating port 36b0f993-b954-495e-9deb-0a3125b518eb with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 2394.009207] env[63379]: DEBUG nova.compute.manager 
[req-2484d18d-2394-4772-85c6-0d51618e3888 req-e7186793-cab9-4e62-acfc-ad37ca699e7f service nova] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Received event network-vif-plugged-36b0f993-b954-495e-9deb-0a3125b518eb {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2394.009461] env[63379]: DEBUG oslo_concurrency.lockutils [req-2484d18d-2394-4772-85c6-0d51618e3888 req-e7186793-cab9-4e62-acfc-ad37ca699e7f service nova] Acquiring lock "16498ce6-8961-408b-8d2a-c61f83f5a56f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2394.009652] env[63379]: DEBUG oslo_concurrency.lockutils [req-2484d18d-2394-4772-85c6-0d51618e3888 req-e7186793-cab9-4e62-acfc-ad37ca699e7f service nova] Lock "16498ce6-8961-408b-8d2a-c61f83f5a56f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2394.009812] env[63379]: DEBUG oslo_concurrency.lockutils [req-2484d18d-2394-4772-85c6-0d51618e3888 req-e7186793-cab9-4e62-acfc-ad37ca699e7f service nova] Lock "16498ce6-8961-408b-8d2a-c61f83f5a56f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2394.009984] env[63379]: DEBUG nova.compute.manager [req-2484d18d-2394-4772-85c6-0d51618e3888 req-e7186793-cab9-4e62-acfc-ad37ca699e7f service nova] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] No waiting events found dispatching network-vif-plugged-36b0f993-b954-495e-9deb-0a3125b518eb {{(pid=63379) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2394.010391] env[63379]: WARNING nova.compute.manager [req-2484d18d-2394-4772-85c6-0d51618e3888 req-e7186793-cab9-4e62-acfc-ad37ca699e7f service nova] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Received unexpected event network-vif-plugged-36b0f993-b954-495e-9deb-0a3125b518eb for instance with vm_state shelved_offloaded and task_state spawning. 
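The port named in these events, 36b0f993-b954-495e-9deb-0a3125b518eb, is the VIF whose network_info entry is logged above and rebuilt in the cache below: a list of VIF dicts with nested subnets, fixed IPs and floating IPs. A small sketch of pulling the addresses out of that structure, using the fields exactly as they appear in the log; the helper itself is illustrative, not a Nova API.

    def list_addresses(network_info):
        """Yield (fixed_ip, [floating_ips]) pairs from a network_info list."""
        for vif in network_info:
            for subnet in vif['network']['subnets']:
                for ip in subnet['ips']:
                    floating = [f['address'] for f in ip.get('floating_ips', [])]
                    yield ip['address'], floating

    # Trimmed example mirroring the entry logged above:
    example = [{'id': '36b0f993-b954-495e-9deb-0a3125b518eb',
                'network': {'subnets': [{'ips': [
                    {'address': '192.168.128.4',
                     'floating_ips': [{'address': '10.180.180.175'}]}]}]}}]
    print(list(list_addresses(example)))  # [('192.168.128.4', ['10.180.180.175'])]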
[ 2394.097026] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquiring lock "refresh_cache-16498ce6-8961-408b-8d2a-c61f83f5a56f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2394.097203] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquired lock "refresh_cache-16498ce6-8961-408b-8d2a-c61f83f5a56f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2394.097379] env[63379]: DEBUG nova.network.neutron [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Building network info cache for instance {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2394.462915] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2394.796645] env[63379]: DEBUG nova.network.neutron [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Updating instance_info_cache with network_info: [{"id": "36b0f993-b954-495e-9deb-0a3125b518eb", "address": "fa:16:3e:54:b1:bf", "network": {"id": "d10d49b9-7fd3-415b-8e53-f56c79be48c5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-59040310-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.175", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "36ebffe6565d46e48409834197213f5a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36b0f993-b9", "ovs_interfaceid": "36b0f993-b954-495e-9deb-0a3125b518eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2394.969453] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2395.299796] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 
tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Releasing lock "refresh_cache-16498ce6-8961-408b-8d2a-c61f83f5a56f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2395.325938] env[63379]: DEBUG nova.virt.hardware [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T23:14:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='f28d506e6a029cc0ea298d934e6e7280',container_format='bare',created_at=2024-12-11T23:45:08Z,direct_url=,disk_format='vmdk',id=5fc4df29-a2ea-4dd2-b092-b9c91df2334b,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1869889611-shelved',owner='36ebffe6565d46e48409834197213f5a',properties=ImageMetaProps,protected=,size=31664128,status='active',tags=,updated_at=2024-12-11T23:45:21Z,virtual_size=,visibility=), allow threads: False {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2395.326212] env[63379]: DEBUG nova.virt.hardware [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Flavor limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2395.326374] env[63379]: DEBUG nova.virt.hardware [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Image limits 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2395.326566] env[63379]: DEBUG nova.virt.hardware [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Flavor pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2395.326716] env[63379]: DEBUG nova.virt.hardware [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Image pref 0:0:0 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2395.326868] env[63379]: DEBUG nova.virt.hardware [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63379) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2395.327087] env[63379]: DEBUG nova.virt.hardware [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2395.327253] env[63379]: DEBUG nova.virt.hardware [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 
tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2395.327419] env[63379]: DEBUG nova.virt.hardware [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Got 1 possible topologies {{(pid=63379) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2395.327580] env[63379]: DEBUG nova.virt.hardware [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2395.327755] env[63379]: DEBUG nova.virt.hardware [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63379) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2395.328935] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b29afd59-b19c-4ab0-9d8e-a14b38a5dc07 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2395.336835] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd9de344-46d5-4948-96aa-2c960e0f7f62 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2395.349938] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:54:b1:bf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '36b0f993-b954-495e-9deb-0a3125b518eb', 'vif_model': 'vmxnet3'}] {{(pid=63379) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2395.357348] env[63379]: DEBUG oslo.service.loopingcall [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2395.357586] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Creating VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2395.357793] env[63379]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b2d20565-07fa-453a-8d79-9648d59742a2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2395.376894] env[63379]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2395.376894] env[63379]: value = "task-1780647" [ 2395.376894] env[63379]: _type = "Task" [ 2395.376894] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2395.385225] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780647, 'name': CreateVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2395.886702] env[63379]: DEBUG oslo_vmware.api [-] Task: {'id': task-1780647, 'name': CreateVM_Task, 'duration_secs': 0.35157} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2395.886910] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Created VM on the ESX host {{(pid=63379) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2395.887504] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/5fc4df29-a2ea-4dd2-b092-b9c91df2334b" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2395.887693] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquired lock "[datastore1] devstack-image-cache_base/5fc4df29-a2ea-4dd2-b092-b9c91df2334b" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2395.888110] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/5fc4df29-a2ea-4dd2-b092-b9c91df2334b" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2395.888367] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e5e0ba2-63de-4b7c-8b31-d976fa5c6d8b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2395.892727] env[63379]: DEBUG oslo_vmware.api [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2395.892727] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]52aca94b-b7d4-65c0-d65c-edd303418a4a" [ 2395.892727] env[63379]: _type = "Task" [ 2395.892727] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2395.899869] env[63379]: DEBUG oslo_vmware.api [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]52aca94b-b7d4-65c0-d65c-edd303418a4a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2396.033943] env[63379]: DEBUG nova.compute.manager [req-c376f536-dbd3-4914-be94-f1f6b8bc1c65 req-21eee922-48cc-4f8d-a223-b1d2f7d60a06 service nova] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Received event network-changed-36b0f993-b954-495e-9deb-0a3125b518eb {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2396.034190] env[63379]: DEBUG nova.compute.manager [req-c376f536-dbd3-4914-be94-f1f6b8bc1c65 req-21eee922-48cc-4f8d-a223-b1d2f7d60a06 service nova] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Refreshing instance network info cache due to event network-changed-36b0f993-b954-495e-9deb-0a3125b518eb. {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11182}} [ 2396.034411] env[63379]: DEBUG oslo_concurrency.lockutils [req-c376f536-dbd3-4914-be94-f1f6b8bc1c65 req-21eee922-48cc-4f8d-a223-b1d2f7d60a06 service nova] Acquiring lock "refresh_cache-16498ce6-8961-408b-8d2a-c61f83f5a56f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2396.034559] env[63379]: DEBUG oslo_concurrency.lockutils [req-c376f536-dbd3-4914-be94-f1f6b8bc1c65 req-21eee922-48cc-4f8d-a223-b1d2f7d60a06 service nova] Acquired lock "refresh_cache-16498ce6-8961-408b-8d2a-c61f83f5a56f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2396.034727] env[63379]: DEBUG nova.network.neutron [req-c376f536-dbd3-4914-be94-f1f6b8bc1c65 req-21eee922-48cc-4f8d-a223-b1d2f7d60a06 service nova] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Refreshing network info cache for port 36b0f993-b954-495e-9deb-0a3125b518eb {{(pid=63379) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2396.402107] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Releasing lock "[datastore1] devstack-image-cache_base/5fc4df29-a2ea-4dd2-b092-b9c91df2334b" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2396.402507] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Processing image 5fc4df29-a2ea-4dd2-b092-b9c91df2334b {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2396.402678] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/5fc4df29-a2ea-4dd2-b092-b9c91df2334b/5fc4df29-a2ea-4dd2-b092-b9c91df2334b.vmdk" {{(pid=63379) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2396.402835] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquired lock "[datastore1] devstack-image-cache_base/5fc4df29-a2ea-4dd2-b092-b9c91df2334b/5fc4df29-a2ea-4dd2-b092-b9c91df2334b.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2396.403028] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2396.403281] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3b842e33-db91-420f-b846-e92cf1f81633 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2396.420120] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2396.420299] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63379) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2396.420958] env[63379]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb567613-7623-40c7-a9e6-aa8db8269cee {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2396.425525] env[63379]: DEBUG oslo_vmware.api [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2396.425525] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]5290bf6e-25bf-414d-7575-21f1d500e0be" [ 2396.425525] env[63379]: _type = "Task" [ 2396.425525] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2396.433584] env[63379]: DEBUG oslo_vmware.api [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': session[526a9413-5212-9a2d-b527-6a96915ebc5a]5290bf6e-25bf-414d-7575-21f1d500e0be, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2396.764887] env[63379]: DEBUG nova.network.neutron [req-c376f536-dbd3-4914-be94-f1f6b8bc1c65 req-21eee922-48cc-4f8d-a223-b1d2f7d60a06 service nova] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Updated VIF entry in instance network info cache for port 36b0f993-b954-495e-9deb-0a3125b518eb. 
{{(pid=63379) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2396.765296] env[63379]: DEBUG nova.network.neutron [req-c376f536-dbd3-4914-be94-f1f6b8bc1c65 req-21eee922-48cc-4f8d-a223-b1d2f7d60a06 service nova] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Updating instance_info_cache with network_info: [{"id": "36b0f993-b954-495e-9deb-0a3125b518eb", "address": "fa:16:3e:54:b1:bf", "network": {"id": "d10d49b9-7fd3-415b-8e53-f56c79be48c5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-59040310-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.175", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "36ebffe6565d46e48409834197213f5a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36b0f993-b9", "ovs_interfaceid": "36b0f993-b954-495e-9deb-0a3125b518eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2396.936118] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Preparing fetch location {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2396.936289] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Fetch image to [datastore1] OSTACK_IMG_9b9c2558-af27-4c59-8c31-2605e4a29a37/OSTACK_IMG_9b9c2558-af27-4c59-8c31-2605e4a29a37.vmdk {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2396.936355] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Downloading stream optimized image 5fc4df29-a2ea-4dd2-b092-b9c91df2334b to [datastore1] OSTACK_IMG_9b9c2558-af27-4c59-8c31-2605e4a29a37/OSTACK_IMG_9b9c2558-af27-4c59-8c31-2605e4a29a37.vmdk on the data store datastore1 as vApp {{(pid=63379) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 2396.936521] env[63379]: DEBUG nova.virt.vmwareapi.images [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Downloading image file data 5fc4df29-a2ea-4dd2-b092-b9c91df2334b to the ESX as VM named 'OSTACK_IMG_9b9c2558-af27-4c59-8c31-2605e4a29a37' {{(pid=63379) 
fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 2397.000929] env[63379]: DEBUG oslo_vmware.rw_handles [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 2397.000929] env[63379]: value = "resgroup-9" [ 2397.000929] env[63379]: _type = "ResourcePool" [ 2397.000929] env[63379]: }. {{(pid=63379) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 2397.001232] env[63379]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-8ce6c3da-c2a8-4047-aa92-8dca001602f7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2397.022300] env[63379]: DEBUG oslo_vmware.rw_handles [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lease: (returnval){ [ 2397.022300] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]524079b3-1e7b-ff8e-9ba1-c2b01f27f18a" [ 2397.022300] env[63379]: _type = "HttpNfcLease" [ 2397.022300] env[63379]: } obtained for vApp import into resource pool (val){ [ 2397.022300] env[63379]: value = "resgroup-9" [ 2397.022300] env[63379]: _type = "ResourcePool" [ 2397.022300] env[63379]: }. {{(pid=63379) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 2397.022705] env[63379]: DEBUG oslo_vmware.api [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the lease: (returnval){ [ 2397.022705] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]524079b3-1e7b-ff8e-9ba1-c2b01f27f18a" [ 2397.022705] env[63379]: _type = "HttpNfcLease" [ 2397.022705] env[63379]: } to be ready. {{(pid=63379) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2397.030765] env[63379]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2397.030765] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]524079b3-1e7b-ff8e-9ba1-c2b01f27f18a" [ 2397.030765] env[63379]: _type = "HttpNfcLease" [ 2397.030765] env[63379]: } is initializing. {{(pid=63379) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2397.267760] env[63379]: DEBUG oslo_concurrency.lockutils [req-c376f536-dbd3-4914-be94-f1f6b8bc1c65 req-21eee922-48cc-4f8d-a223-b1d2f7d60a06 service nova] Releasing lock "refresh_cache-16498ce6-8961-408b-8d2a-c61f83f5a56f" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2397.531159] env[63379]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2397.531159] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]524079b3-1e7b-ff8e-9ba1-c2b01f27f18a" [ 2397.531159] env[63379]: _type = "HttpNfcLease" [ 2397.531159] env[63379]: } is ready. 
{{(pid=63379) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2397.531661] env[63379]: DEBUG oslo_vmware.rw_handles [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2397.531661] env[63379]: value = "session[526a9413-5212-9a2d-b527-6a96915ebc5a]524079b3-1e7b-ff8e-9ba1-c2b01f27f18a" [ 2397.531661] env[63379]: _type = "HttpNfcLease" [ 2397.531661] env[63379]: }. {{(pid=63379) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 2397.532208] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75cb913d-07dc-42f0-8da9-c6a78bc33739 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2397.539383] env[63379]: DEBUG oslo_vmware.rw_handles [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527923d5-aca8-ff90-aea0-3d85ea683999/disk-0.vmdk from lease info. {{(pid=63379) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2397.539569] env[63379]: DEBUG oslo_vmware.rw_handles [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Creating HTTP connection to write to file with size = 31664128 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527923d5-aca8-ff90-aea0-3d85ea683999/disk-0.vmdk. {{(pid=63379) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2397.602846] env[63379]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-98797fc9-4af3-4492-8a16-c70a7f699f47 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2398.640368] env[63379]: DEBUG oslo_vmware.rw_handles [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Completed reading data from the image iterator. {{(pid=63379) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2398.640845] env[63379]: DEBUG oslo_vmware.rw_handles [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527923d5-aca8-ff90-aea0-3d85ea683999/disk-0.vmdk. 
{{(pid=63379) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2398.641817] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed13e43a-4b01-4f62-b8d9-ce137e281136 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2398.649381] env[63379]: DEBUG oslo_vmware.rw_handles [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527923d5-aca8-ff90-aea0-3d85ea683999/disk-0.vmdk is in state: ready. {{(pid=63379) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2398.649572] env[63379]: DEBUG oslo_vmware.rw_handles [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527923d5-aca8-ff90-aea0-3d85ea683999/disk-0.vmdk. {{(pid=63379) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 2398.649801] env[63379]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-0e0294d2-626f-4311-9511-a0f1a60ca39a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2398.837864] env[63379]: DEBUG oslo_vmware.rw_handles [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527923d5-aca8-ff90-aea0-3d85ea683999/disk-0.vmdk. 
{{(pid=63379) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 2398.838113] env[63379]: INFO nova.virt.vmwareapi.images [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Downloaded image file data 5fc4df29-a2ea-4dd2-b092-b9c91df2334b [ 2398.838945] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-753104ed-1922-4a26-9610-17380125d3a4 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2398.853790] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d6d8abe9-bb4e-40ba-a1ef-19a07d340b77 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2398.884196] env[63379]: INFO nova.virt.vmwareapi.images [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] The imported VM was unregistered [ 2398.886433] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Caching image {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2398.886675] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Creating directory with path [datastore1] devstack-image-cache_base/5fc4df29-a2ea-4dd2-b092-b9c91df2334b {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2398.886936] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e484a194-7579-4f99-9f22-292cd261393c {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2398.907234] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Created directory with path [datastore1] devstack-image-cache_base/5fc4df29-a2ea-4dd2-b092-b9c91df2334b {{(pid=63379) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2398.907390] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_9b9c2558-af27-4c59-8c31-2605e4a29a37/OSTACK_IMG_9b9c2558-af27-4c59-8c31-2605e4a29a37.vmdk to [datastore1] devstack-image-cache_base/5fc4df29-a2ea-4dd2-b092-b9c91df2334b/5fc4df29-a2ea-4dd2-b092-b9c91df2334b.vmdk. 
{{(pid=63379) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 2398.907626] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-5a0d6177-b18b-4d9f-a425-19b886282f50 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2398.914200] env[63379]: DEBUG oslo_vmware.api [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2398.914200] env[63379]: value = "task-1780650" [ 2398.914200] env[63379]: _type = "Task" [ 2398.914200] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2398.921638] env[63379]: DEBUG oslo_vmware.api [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780650, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2399.425815] env[63379]: DEBUG oslo_vmware.api [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780650, 'name': MoveVirtualDisk_Task} progress is 24%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2399.926133] env[63379]: DEBUG oslo_vmware.api [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780650, 'name': MoveVirtualDisk_Task} progress is 46%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2400.426636] env[63379]: DEBUG oslo_vmware.api [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780650, 'name': MoveVirtualDisk_Task} progress is 69%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2400.928023] env[63379]: DEBUG oslo_vmware.api [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780650, 'name': MoveVirtualDisk_Task} progress is 91%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2401.426574] env[63379]: DEBUG oslo_vmware.api [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780650, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.257257} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2401.426809] env[63379]: INFO nova.virt.vmwareapi.ds_util [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_9b9c2558-af27-4c59-8c31-2605e4a29a37/OSTACK_IMG_9b9c2558-af27-4c59-8c31-2605e4a29a37.vmdk to [datastore1] devstack-image-cache_base/5fc4df29-a2ea-4dd2-b092-b9c91df2334b/5fc4df29-a2ea-4dd2-b092-b9c91df2334b.vmdk. [ 2401.426987] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Cleaning up location [datastore1] OSTACK_IMG_9b9c2558-af27-4c59-8c31-2605e4a29a37 {{(pid=63379) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 2401.427178] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_9b9c2558-af27-4c59-8c31-2605e4a29a37 {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2401.427422] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9f752324-c94b-466f-9d65-50a60a4c2a25 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2401.433824] env[63379]: DEBUG oslo_vmware.api [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2401.433824] env[63379]: value = "task-1780651" [ 2401.433824] env[63379]: _type = "Task" [ 2401.433824] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2401.442668] env[63379]: DEBUG oslo_vmware.api [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780651, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2401.944155] env[63379]: DEBUG oslo_vmware.api [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780651, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.049471} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2401.944485] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2401.944574] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Releasing lock "[datastore1] devstack-image-cache_base/5fc4df29-a2ea-4dd2-b092-b9c91df2334b/5fc4df29-a2ea-4dd2-b092-b9c91df2334b.vmdk" {{(pid=63379) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2401.944840] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/5fc4df29-a2ea-4dd2-b092-b9c91df2334b/5fc4df29-a2ea-4dd2-b092-b9c91df2334b.vmdk to [datastore1] 16498ce6-8961-408b-8d2a-c61f83f5a56f/16498ce6-8961-408b-8d2a-c61f83f5a56f.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2401.945102] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cb414de9-0051-44de-bdc5-db2b9f86360e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2401.951130] env[63379]: DEBUG oslo_vmware.api [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2401.951130] env[63379]: value = "task-1780652" [ 2401.951130] env[63379]: _type = "Task" [ 2401.951130] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2401.958499] env[63379]: DEBUG oslo_vmware.api [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780652, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2401.964040] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2402.464713] env[63379]: DEBUG oslo_vmware.api [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780652, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2402.962283] env[63379]: DEBUG oslo_vmware.api [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780652, 'name': CopyVirtualDisk_Task} progress is 24%. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2403.464628] env[63379]: DEBUG oslo_vmware.api [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780652, 'name': CopyVirtualDisk_Task} progress is 46%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2403.466334] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2403.466565] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Cleaning up deleted instances {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11274}} [ 2403.965851] env[63379]: DEBUG oslo_vmware.api [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780652, 'name': CopyVirtualDisk_Task} progress is 71%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2403.971709] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] There are 7 instances to clean {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11283}} [ 2403.971935] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: e9de2a12-dd85-44ba-9066-324b3fc72d76] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 2404.466816] env[63379]: DEBUG oslo_vmware.api [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780652, 'name': CopyVirtualDisk_Task} progress is 91%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2404.475526] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 021d4755-9144-43c7-8c86-f167b7b294e4] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 2404.966627] env[63379]: DEBUG oslo_vmware.api [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780652, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.65994} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2404.967036] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/5fc4df29-a2ea-4dd2-b092-b9c91df2334b/5fc4df29-a2ea-4dd2-b092-b9c91df2334b.vmdk to [datastore1] 16498ce6-8961-408b-8d2a-c61f83f5a56f/16498ce6-8961-408b-8d2a-c61f83f5a56f.vmdk {{(pid=63379) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2404.967691] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f88adfe2-38d2-46cb-aff1-5aa6754a14cc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2404.980530] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: dc3117be-09c2-445e-a575-ff588d94238a] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 2404.990617] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Reconfiguring VM instance instance-0000007d to attach disk [datastore1] 16498ce6-8961-408b-8d2a-c61f83f5a56f/16498ce6-8961-408b-8d2a-c61f83f5a56f.vmdk or device None with type streamOptimized {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2404.991081] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e30a453c-4e49-440c-92a9-589cdd6385c2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2405.012851] env[63379]: DEBUG oslo_vmware.api [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2405.012851] env[63379]: value = "task-1780653" [ 2405.012851] env[63379]: _type = "Task" [ 2405.012851] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2405.025242] env[63379]: DEBUG oslo_vmware.api [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780653, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2405.492256] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: d9640bf2-0f88-4c0e-9e21-7ee00ee8800f] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 2405.523359] env[63379]: DEBUG oslo_vmware.api [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780653, 'name': ReconfigVM_Task, 'duration_secs': 0.263617} completed successfully. 
{{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2405.523665] env[63379]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Reconfigured VM instance instance-0000007d to attach disk [datastore1] 16498ce6-8961-408b-8d2a-c61f83f5a56f/16498ce6-8961-408b-8d2a-c61f83f5a56f.vmdk or device None with type streamOptimized {{(pid=63379) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2405.524308] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-07873da6-8d8f-4d62-86e6-aa2f67521b05 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2405.530445] env[63379]: DEBUG oslo_vmware.api [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2405.530445] env[63379]: value = "task-1780654" [ 2405.530445] env[63379]: _type = "Task" [ 2405.530445] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2405.538173] env[63379]: DEBUG oslo_vmware.api [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780654, 'name': Rename_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2405.996042] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 18498244-3385-47dd-8810-b0cc731c3966] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 2406.039979] env[63379]: DEBUG oslo_vmware.api [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780654, 'name': Rename_Task, 'duration_secs': 0.133801} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2406.040280] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Powering on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2406.040519] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d5df35f3-05a9-4cba-9542-de2010e7e486 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2406.046579] env[63379]: DEBUG oslo_vmware.api [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2406.046579] env[63379]: value = "task-1780655" [ 2406.046579] env[63379]: _type = "Task" [ 2406.046579] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2406.053828] env[63379]: DEBUG oslo_vmware.api [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780655, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2406.499888] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 7f0c426b-1ce3-469f-8ee1-6dd2178f014e] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 2406.556200] env[63379]: DEBUG oslo_vmware.api [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780655, 'name': PowerOnVM_Task, 'duration_secs': 0.412908} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2406.556467] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Powered on the VM {{(pid=63379) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2406.658355] env[63379]: DEBUG nova.compute.manager [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Checking state {{(pid=63379) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2406.659364] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a56dc470-a5e0-4cb0-b1c0-731b5a79c3d2 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.003598] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] [instance: 85ecb409-ab53-43d9-8120-2f8c7402d74c] Instance has had 0 of 5 cleanup attempts {{(pid=63379) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11287}} [ 2407.179126] env[63379]: DEBUG oslo_concurrency.lockutils [None req-bb27d612-9a24-4a02-a86a-350ea6e5a530 tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lock "16498ce6-8961-408b-8d2a-c61f83f5a56f" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 18.713s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2414.965453] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._sync_power_states {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2415.469388] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-76609179-3ebc-4316-8203-21a64671102b None None] Getting list of instances from cluster (obj){ [ 2415.469388] env[63379]: value = "domain-c8" [ 2415.469388] env[63379]: _type = "ClusterComputeResource" [ 2415.469388] env[63379]: } {{(pid=63379) list_instances 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 2415.470572] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b098fc3-d3f0-48e5-92be-76eae5e4d3cb {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2415.480226] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-76609179-3ebc-4316-8203-21a64671102b None None] Got total of 1 instances {{(pid=63379) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 2415.480401] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Triggering sync for uuid 16498ce6-8961-408b-8d2a-c61f83f5a56f {{(pid=63379) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10384}} [ 2415.480748] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "16498ce6-8961-408b-8d2a-c61f83f5a56f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2415.480980] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "16498ce6-8961-408b-8d2a-c61f83f5a56f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2415.481276] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2415.481415] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Cleaning up deleted instances with incomplete migration {{(pid=63379) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11312}} [ 2415.483152] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e33be67-59eb-4fea-acc4-e4bc87760d05 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2415.991899] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "16498ce6-8961-408b-8d2a-c61f83f5a56f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.511s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2440.469271] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager.update_available_resource {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2440.972651] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2440.972919] env[63379]: DEBUG oslo_concurrency.lockutils [None 
req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2440.973065] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2440.973230] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63379) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2440.974176] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb4ee7a4-1fed-4753-b0ec-ed891728ed11 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2440.982448] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80254dd4-b5d8-40b8-b173-47aa742d23a3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2440.996509] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd539dc2-797e-46f6-9b74-9feff4c35736 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2441.002559] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4b478d9-4af1-4e6f-876f-390b26bf8d46 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2441.030271] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181363MB free_disk=164GB free_vcpus=48 pci_devices=None {{(pid=63379) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2441.030424] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2441.030597] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2442.115508] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Instance 16498ce6-8961-408b-8d2a-c61f83f5a56f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63379) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2442.115797] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2442.115911] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=63379) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2442.140919] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40867204-9d07-428a-b721-abed508fe00a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2442.148263] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fb38467-c31c-470e-b803-4d7694d6ae4f {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2442.177200] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71f06fe9-342d-4972-a02f-e93111f171c3 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2442.184033] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08988aab-34a2-4906-9f4c-8fa7ea3c37cc {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2442.196680] env[63379]: DEBUG nova.compute.provider_tree [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2442.700102] env[63379]: DEBUG nova.scheduler.client.report [None req-76609179-3ebc-4316-8203-21a64671102b None None] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2443.204602] env[63379]: DEBUG nova.compute.resource_tracker [None req-76609179-3ebc-4316-8203-21a64671102b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63379) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2443.205012] env[63379]: DEBUG oslo_concurrency.lockutils [None req-76609179-3ebc-4316-8203-21a64671102b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.174s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2443.490855] env[63379]: DEBUG oslo_concurrency.lockutils [None 
req-99ac7e94-e8b9-440d-89a5-61659de98cea tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquiring lock "16498ce6-8961-408b-8d2a-c61f83f5a56f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2443.491153] env[63379]: DEBUG oslo_concurrency.lockutils [None req-99ac7e94-e8b9-440d-89a5-61659de98cea tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lock "16498ce6-8961-408b-8d2a-c61f83f5a56f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2443.491533] env[63379]: DEBUG oslo_concurrency.lockutils [None req-99ac7e94-e8b9-440d-89a5-61659de98cea tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquiring lock "16498ce6-8961-408b-8d2a-c61f83f5a56f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2443.491625] env[63379]: DEBUG oslo_concurrency.lockutils [None req-99ac7e94-e8b9-440d-89a5-61659de98cea tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lock "16498ce6-8961-408b-8d2a-c61f83f5a56f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2443.491754] env[63379]: DEBUG oslo_concurrency.lockutils [None req-99ac7e94-e8b9-440d-89a5-61659de98cea tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lock "16498ce6-8961-408b-8d2a-c61f83f5a56f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2443.495362] env[63379]: INFO nova.compute.manager [None req-99ac7e94-e8b9-440d-89a5-61659de98cea tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Terminating instance [ 2443.497152] env[63379]: DEBUG nova.compute.manager [None req-99ac7e94-e8b9-440d-89a5-61659de98cea tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Start destroying the instance on the hypervisor. 
{{(pid=63379) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2443.497355] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-99ac7e94-e8b9-440d-89a5-61659de98cea tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Destroying instance {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2443.498199] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c42b101e-a597-4532-981b-a735ea7e2e40 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2443.506333] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-99ac7e94-e8b9-440d-89a5-61659de98cea tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Powering off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2443.506567] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3a9aa9ee-b247-45fb-8bcf-954476f9c942 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2443.512490] env[63379]: DEBUG oslo_vmware.api [None req-99ac7e94-e8b9-440d-89a5-61659de98cea tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2443.512490] env[63379]: value = "task-1780656" [ 2443.512490] env[63379]: _type = "Task" [ 2443.512490] env[63379]: } to complete. {{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2443.521416] env[63379]: DEBUG oslo_vmware.api [None req-99ac7e94-e8b9-440d-89a5-61659de98cea tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780656, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2443.700334] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2443.700591] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2443.700740] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2443.963935] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2443.964140] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63379) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10593}} [ 2444.022242] env[63379]: DEBUG oslo_vmware.api [None req-99ac7e94-e8b9-440d-89a5-61659de98cea tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780656, 'name': PowerOffVM_Task, 'duration_secs': 0.157805} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2444.022523] env[63379]: DEBUG nova.virt.vmwareapi.vm_util [None req-99ac7e94-e8b9-440d-89a5-61659de98cea tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Powered off the VM {{(pid=63379) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2444.022692] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-99ac7e94-e8b9-440d-89a5-61659de98cea tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Unregistering the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2444.022924] env[63379]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7958d38f-4be9-4659-a25b-ff3792250a3d {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2444.091420] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-99ac7e94-e8b9-440d-89a5-61659de98cea tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Unregistered the VM {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2444.091661] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-99ac7e94-e8b9-440d-89a5-61659de98cea tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Deleting contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2444.091857] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-99ac7e94-e8b9-440d-89a5-61659de98cea tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Deleting the datastore file [datastore1] 16498ce6-8961-408b-8d2a-c61f83f5a56f {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2444.092146] env[63379]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8e3370e5-19ac-4206-b1b1-d3dc8cc4c82b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2444.099248] env[63379]: DEBUG oslo_vmware.api [None req-99ac7e94-e8b9-440d-89a5-61659de98cea tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for the task: (returnval){ [ 2444.099248] env[63379]: value = "task-1780658" [ 2444.099248] env[63379]: _type = "Task" [ 2444.099248] env[63379]: } to complete. 
{{(pid=63379) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2444.106329] env[63379]: DEBUG oslo_vmware.api [None req-99ac7e94-e8b9-440d-89a5-61659de98cea tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780658, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2444.608972] env[63379]: DEBUG oslo_vmware.api [None req-99ac7e94-e8b9-440d-89a5-61659de98cea tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Task: {'id': task-1780658, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.305104} completed successfully. {{(pid=63379) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2444.609371] env[63379]: DEBUG nova.virt.vmwareapi.ds_util [None req-99ac7e94-e8b9-440d-89a5-61659de98cea tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Deleted the datastore file {{(pid=63379) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2444.609478] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-99ac7e94-e8b9-440d-89a5-61659de98cea tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Deleted contents of the VM from datastore datastore1 {{(pid=63379) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2444.609604] env[63379]: DEBUG nova.virt.vmwareapi.vmops [None req-99ac7e94-e8b9-440d-89a5-61659de98cea tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Instance destroyed {{(pid=63379) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2444.609788] env[63379]: INFO nova.compute.manager [None req-99ac7e94-e8b9-440d-89a5-61659de98cea tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Took 1.11 seconds to destroy the instance on the hypervisor. [ 2444.610065] env[63379]: DEBUG oslo.service.loopingcall [None req-99ac7e94-e8b9-440d-89a5-61659de98cea tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63379) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2444.610267] env[63379]: DEBUG nova.compute.manager [-] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Deallocating network for instance {{(pid=63379) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2444.610371] env[63379]: DEBUG nova.network.neutron [-] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] deallocate_for_instance() {{(pid=63379) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2445.037020] env[63379]: DEBUG nova.compute.manager [req-928c9ee9-5413-4b7e-917d-f61d386a233d req-90cd6816-3762-486a-80b1-275adcf2635a service nova] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Received event network-vif-deleted-36b0f993-b954-495e-9deb-0a3125b518eb {{(pid=63379) external_instance_event /opt/stack/nova/nova/compute/manager.py:11177}} [ 2445.037020] env[63379]: INFO nova.compute.manager [req-928c9ee9-5413-4b7e-917d-f61d386a233d req-90cd6816-3762-486a-80b1-275adcf2635a service nova] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Neutron deleted interface 36b0f993-b954-495e-9deb-0a3125b518eb; detaching it from the instance and deleting it from the info cache [ 2445.037020] env[63379]: DEBUG nova.network.neutron [req-928c9ee9-5413-4b7e-917d-f61d386a233d req-90cd6816-3762-486a-80b1-275adcf2635a service nova] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2445.514601] env[63379]: DEBUG nova.network.neutron [-] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Updating instance_info_cache with network_info: [] {{(pid=63379) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2445.540041] env[63379]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-62b98088-f655-44fc-852a-767e3291ba5a {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2445.550238] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6789b47-bbe0-4c70-b899-d3ae3a2d1ed7 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2445.573781] env[63379]: DEBUG nova.compute.manager [req-928c9ee9-5413-4b7e-917d-f61d386a233d req-90cd6816-3762-486a-80b1-275adcf2635a service nova] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Detach interface failed, port_id=36b0f993-b954-495e-9deb-0a3125b518eb, reason: Instance 16498ce6-8961-408b-8d2a-c61f83f5a56f could not be found. {{(pid=63379) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11011}} [ 2446.018094] env[63379]: INFO nova.compute.manager [-] [instance: 16498ce6-8961-408b-8d2a-c61f83f5a56f] Took 1.41 seconds to deallocate network for instance. 
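[editor's note] The destroy sequence logged above (VirtualMachine.PowerOffVM_Task, VirtualMachine.UnregisterVM, FileManager.DeleteDatastoreFile_Task) repeats the same invoke-then-poll pattern: an asynchronous vSphere task is started, and oslo_vmware's session polls it until the "completed successfully" entry is emitted. A minimal sketch of that pattern follows; it is not Nova's vmops code, and it assumes `session` is an already-constructed oslo_vmware.api.VMwareAPISession and `vm_ref` is a VirtualMachine managed-object reference obtained elsewhere.

    # Hypothetical helper illustrating the invoke-then-poll pattern seen in the
    # "Invoking VirtualMachine.PowerOffVM_Task" / "Waiting for the task" /
    # "progress is 0%" / "completed successfully" entries above.
    def power_off_and_wait(session, vm_ref):
        # Start the asynchronous vSphere operation; the call returns a Task
        # managed object immediately rather than blocking on the power-off.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # wait_for_task() polls the task (the _poll_task progress entries) and
        # raises if the task ends in an error state; on success it returns the
        # final task info, including 'duration_secs' as logged above.
        return session.wait_for_task(task)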
[ 2446.523842] env[63379]: DEBUG oslo_concurrency.lockutils [None req-99ac7e94-e8b9-440d-89a5-61659de98cea tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2446.524134] env[63379]: DEBUG oslo_concurrency.lockutils [None req-99ac7e94-e8b9-440d-89a5-61659de98cea tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2446.524389] env[63379]: DEBUG nova.objects.instance [None req-99ac7e94-e8b9-440d-89a5-61659de98cea tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lazy-loading 'resources' on Instance uuid 16498ce6-8961-408b-8d2a-c61f83f5a56f {{(pid=63379) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2447.058734] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab9f81ec-24d8-4058-8dd9-a4d73cebbd7e {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2447.066627] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3054fb4d-c43d-4aa5-9611-b677b425c630 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2447.096527] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0395c544-ba43-457b-b3f3-a3d26819383b {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2447.103581] env[63379]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cab6f606-8d48-45fb-9c49-fd7d4cef6c48 {{(pid=63379) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2447.116377] env[63379]: DEBUG nova.compute.provider_tree [None req-99ac7e94-e8b9-440d-89a5-61659de98cea tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Inventory has not changed in ProviderTree for provider: cf478c89-515f-4372-b90f-4868ab56e978 {{(pid=63379) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2447.619824] env[63379]: DEBUG nova.scheduler.client.report [None req-99ac7e94-e8b9-440d-89a5-61659de98cea tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Inventory has not changed for provider cf478c89-515f-4372-b90f-4868ab56e978 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63379) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2448.125080] env[63379]: DEBUG oslo_concurrency.lockutils [None 
req-99ac7e94-e8b9-440d-89a5-61659de98cea tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.601s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2448.145667] env[63379]: INFO nova.scheduler.client.report [None req-99ac7e94-e8b9-440d-89a5-61659de98cea tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Deleted allocations for instance 16498ce6-8961-408b-8d2a-c61f83f5a56f [ 2448.654650] env[63379]: DEBUG oslo_concurrency.lockutils [None req-99ac7e94-e8b9-440d-89a5-61659de98cea tempest-AttachVolumeShelveTestJSON-313583966 tempest-AttachVolumeShelveTestJSON-313583966-project-member] Lock "16498ce6-8961-408b-8d2a-c61f83f5a56f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.163s {{(pid=63379) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2449.965036] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2450.964430] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2450.964630] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Starting heal instance info cache {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9974}} [ 2450.964758] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Rebuilding the list of instances to heal {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9978}} [ 2451.468358] env[63379]: DEBUG nova.compute.manager [None req-76609179-3ebc-4316-8203-21a64671102b None None] Didn't find any instances for network info cache update. {{(pid=63379) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10060}} [ 2451.468757] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2456.964505] env[63379]: DEBUG oslo_service.periodic_task [None req-76609179-3ebc-4316-8203-21a64671102b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63379) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
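[editor's note] The trailing "Running periodic task ComputeManager._..." entries come from oslo_service's periodic-task machinery, which a single service thread drives on a fixed cadence. The sketch below is a self-contained illustration of that mechanism under stated assumptions (DemoManager and _poll_something are invented names, not Nova's manager code).

    # Minimal periodic-task sketch using oslo.service; run with plain Python.
    from oslo_config import cfg
    from oslo_service import periodic_task

    CONF = cfg.CONF

    class DemoManager(periodic_task.PeriodicTasks):
        def __init__(self):
            # PeriodicTasks needs a ConfigOpts object to read task settings from.
            super().__init__(CONF)

        @periodic_task.periodic_task(spacing=60)
        def _poll_something(self, context):
            # A real manager would poll driver or DB state here; the decorator is
            # what makes the task show up in "Running periodic task ..." logging.
            print("polled")

    manager = DemoManager()
    # In a running service a looping call invokes this repeatedly; calling it
    # once executes whichever decorated tasks are currently due.
    manager.run_periodic_tasks(context=None)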